From 72696e8a80e7b2145faa2dbb0ef679b5491ee337 Mon Sep 17 00:00:00 2001 From: wupg98 <106410199+wupg98@users.noreply.github.com> Date: Sat, 2 Sep 2023 07:59:34 +0200 Subject: [PATCH] Add files via upload web.archive.org/https://github.com/zeronet-enhanced/ZeroNet/ --- src/Config.py | 703 ++++++++ src/Translate/Translate.py | 135 ++ src/Translate/__init__.py | 1 + src/Translate/languages/da.json | 51 + src/Translate/languages/de.json | 51 + src/Translate/languages/es.json | 51 + src/Translate/languages/fa.json | 50 + src/Translate/languages/fr.json | 51 + src/Translate/languages/hu.json | 51 + src/Translate/languages/it.json | 51 + src/Translate/languages/jp.json | 66 + src/Translate/languages/nl.json | 51 + src/Translate/languages/pl.json | 54 + src/Translate/languages/pt-br.json | 57 + src/Translate/languages/ru.json | 51 + src/Translate/languages/sk.json | 57 + src/Translate/languages/sl.json | 51 + src/Translate/languages/tr.json | 51 + src/Translate/languages/zh-tw.json | 54 + src/Translate/languages/zh.json | 55 + src/Ui/UiRequest.py | 949 ++++++++++ src/Ui/UiServer.py | 206 +++ src/Ui/UiWebsocket.py | 1297 ++++++++++++++ src/Ui/__init__.py | 3 + src/Ui/media/Fixbutton.coffee | 32 + src/Ui/media/Infopanel.coffee | 57 + src/Ui/media/Loading.coffee | 91 + src/Ui/media/Notifications.coffee | 89 + src/Ui/media/Wrapper.coffee | 714 ++++++++ src/Ui/media/Wrapper.css | 230 +++ src/Ui/media/WrapperZeroFrame.coffee | 22 + src/Ui/media/ZeroSiteTheme.coffee | 49 + src/Ui/media/all.css | 269 +++ src/Ui/media/all.js | 2125 +++++++++++++++++++++++ src/Ui/media/img/apple-touch-icon.png | Bin 0 -> 8178 bytes src/Ui/media/img/favicon.ico | Bin 0 -> 1150 bytes src/Ui/media/img/favicon.psd | Bin 0 -> 52520 bytes src/Ui/media/img/loading-circle.gif | Bin 0 -> 2346 bytes src/Ui/media/img/loading.gif | Bin 0 -> 723 bytes src/Ui/media/img/logo-white.svg | 1 + src/Ui/media/img/logo.png | Bin 0 -> 11379 bytes src/Ui/media/img/logo.psd | Bin 0 -> 62132 bytes src/Ui/media/img/logo.svg | 1 + src/Ui/media/lib/00-jquery.min.js | 2 + src/Ui/media/lib/RateLimit.coffee | 14 + src/Ui/media/lib/Translate.coffee | 1 + src/Ui/media/lib/ZeroWebsocket.coffee | 95 + src/Ui/media/lib/jquery.cssanim.js | 34 + src/Ui/media/lib/jquery.csslater.coffee | 36 + src/Ui/media/lib/jquery.easing.js | 168 ++ src/Ui/template/site_add.html | 40 + src/Ui/template/wrapper.html | 103 ++ src/User/User.py | 176 ++ src/User/UserManager.py | 77 + src/User/__init__.py | 1 + src/Worker/Worker.py | 239 +++ src/Worker/WorkerManager.py | 600 +++++++ src/Worker/WorkerTaskManager.py | 122 ++ src/Worker/__init__.py | 2 + src/__init__.py | 0 src/loglevel_overrides.py | 9 + src/main.py | 603 +++++++ src/util/Cached.py | 68 + src/util/CircularIterator.py | 34 + src/util/Diff.py | 48 + src/util/Electrum.py | 39 + src/util/Event.py | 55 + src/util/Flag.py | 22 + src/util/GreenletManager.py | 44 + src/util/Msgpack.py | 101 ++ src/util/Noparallel.py | 202 +++ src/util/OpensslFindPatch.py | 69 + src/util/Platform.py | 36 + src/util/Pooled.py | 65 + src/util/QueryJson.py | 67 + src/util/RateLimit.py | 128 ++ src/util/SafeRe.py | 98 ++ src/util/SelectiveLogger.py | 43 + src/util/SocksProxy.py | 26 + src/util/ThreadPool.py | 180 ++ src/util/UpnpPunch.py | 395 +++++ src/util/__init__.py | 5 + src/util/helper.py | 357 ++++ 83 files changed, 12281 insertions(+) create mode 100644 src/Config.py create mode 100644 src/Translate/Translate.py create mode 100644 src/Translate/__init__.py create mode 100644 src/Translate/languages/da.json create mode 100644 
src/Translate/languages/de.json create mode 100644 src/Translate/languages/es.json create mode 100644 src/Translate/languages/fa.json create mode 100644 src/Translate/languages/fr.json create mode 100644 src/Translate/languages/hu.json create mode 100644 src/Translate/languages/it.json create mode 100644 src/Translate/languages/jp.json create mode 100644 src/Translate/languages/nl.json create mode 100644 src/Translate/languages/pl.json create mode 100644 src/Translate/languages/pt-br.json create mode 100644 src/Translate/languages/ru.json create mode 100644 src/Translate/languages/sk.json create mode 100644 src/Translate/languages/sl.json create mode 100644 src/Translate/languages/tr.json create mode 100644 src/Translate/languages/zh-tw.json create mode 100644 src/Translate/languages/zh.json create mode 100644 src/Ui/UiRequest.py create mode 100644 src/Ui/UiServer.py create mode 100644 src/Ui/UiWebsocket.py create mode 100644 src/Ui/__init__.py create mode 100644 src/Ui/media/Fixbutton.coffee create mode 100644 src/Ui/media/Infopanel.coffee create mode 100644 src/Ui/media/Loading.coffee create mode 100644 src/Ui/media/Notifications.coffee create mode 100644 src/Ui/media/Wrapper.coffee create mode 100644 src/Ui/media/Wrapper.css create mode 100644 src/Ui/media/WrapperZeroFrame.coffee create mode 100644 src/Ui/media/ZeroSiteTheme.coffee create mode 100644 src/Ui/media/all.css create mode 100644 src/Ui/media/all.js create mode 100644 src/Ui/media/img/apple-touch-icon.png create mode 100644 src/Ui/media/img/favicon.ico create mode 100644 src/Ui/media/img/favicon.psd create mode 100644 src/Ui/media/img/loading-circle.gif create mode 100644 src/Ui/media/img/loading.gif create mode 100644 src/Ui/media/img/logo-white.svg create mode 100644 src/Ui/media/img/logo.png create mode 100644 src/Ui/media/img/logo.psd create mode 100644 src/Ui/media/img/logo.svg create mode 100644 src/Ui/media/lib/00-jquery.min.js create mode 100644 src/Ui/media/lib/RateLimit.coffee create mode 100644 src/Ui/media/lib/Translate.coffee create mode 100644 src/Ui/media/lib/ZeroWebsocket.coffee create mode 100644 src/Ui/media/lib/jquery.cssanim.js create mode 100644 src/Ui/media/lib/jquery.csslater.coffee create mode 100644 src/Ui/media/lib/jquery.easing.js create mode 100644 src/Ui/template/site_add.html create mode 100644 src/Ui/template/wrapper.html create mode 100644 src/User/User.py create mode 100644 src/User/UserManager.py create mode 100644 src/User/__init__.py create mode 100644 src/Worker/Worker.py create mode 100644 src/Worker/WorkerManager.py create mode 100644 src/Worker/WorkerTaskManager.py create mode 100644 src/Worker/__init__.py create mode 100644 src/__init__.py create mode 100644 src/loglevel_overrides.py create mode 100644 src/main.py create mode 100644 src/util/Cached.py create mode 100644 src/util/CircularIterator.py create mode 100644 src/util/Diff.py create mode 100644 src/util/Electrum.py create mode 100644 src/util/Event.py create mode 100644 src/util/Flag.py create mode 100644 src/util/GreenletManager.py create mode 100644 src/util/Msgpack.py create mode 100644 src/util/Noparallel.py create mode 100644 src/util/OpensslFindPatch.py create mode 100644 src/util/Platform.py create mode 100644 src/util/Pooled.py create mode 100644 src/util/QueryJson.py create mode 100644 src/util/RateLimit.py create mode 100644 src/util/SafeRe.py create mode 100644 src/util/SelectiveLogger.py create mode 100644 src/util/SocksProxy.py create mode 100644 src/util/ThreadPool.py create mode 100644 src/util/UpnpPunch.py create 
mode 100644 src/util/__init__.py create mode 100644 src/util/helper.py diff --git a/src/Config.py b/src/Config.py new file mode 100644 index 00000000..f4a230dd --- /dev/null +++ b/src/Config.py @@ -0,0 +1,703 @@ +import argparse +import sys +import os +import locale +import re +import configparser +import logging +import logging.handlers +import stat +import time + + +class Config(object): + + def __init__(self, argv): + self.version = "0.7.2" + self.rev = 4555 + self.argv = argv + self.action = None + self.test_parser = None + self.pending_changes = {} + self.need_restart = False + self.keys_api_change_allowed = set([ + "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers", + "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline", + "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" + ]) + self.keys_restart_need = set([ + "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" + ]) + self.start_dir = self.getStartDir() + + self.config_file = self.start_dir + "/zeronet.conf" + self.data_dir = self.start_dir + "/data" + self.log_dir = self.start_dir + "/log" + self.openssl_lib_file = None + self.openssl_bin_file = None + + self.trackers_file = False + self.createParser() + self.createArguments() + + def createParser(self): + # Create parser + self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) + self.parser.register('type', 'bool', self.strToBool) + self.subparsers = self.parser.add_subparsers(title="Action to perform", dest="action") + + def __str__(self): + return str(self.arguments).replace("Namespace", "Config") # Using argparse str output + + # Convert string to bool + def strToBool(self, v): + return v.lower() in ("yes", "true", "t", "1") + + def getStartDir(self): + this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd") + + if "--start_dir" in self.argv: + start_dir = self.argv[self.argv.index("--start_dir") + 1] + elif this_file.endswith("/Contents/Resources/core/src/Config.py"): + # Running as ZeroNet.app + if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")): + # Running from a non-writeable directory, put data to Application Support + start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet") + else: + # Running from a writeable directory, put data next to the .app + start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file) + elif this_file.endswith("/core/src/Config.py"): + # Running as exe or source is at Application Support directory, put var files outside of the core dir + start_dir = this_file.replace("/core/src/Config.py", "") + elif this_file.endswith("usr/share/zeronet/src/Config.py"): + # Running from a non-writeable location, e.g., AppImage + start_dir = os.path.expanduser("~/ZeroNet") + else: + start_dir = "." + + return start_dir
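
[Editor's note] Since argparse has no built-in boolean type, createParser above registers strToBool under the name 'bool', so later arguments can declare type='bool' and accept yes/no-style values. A minimal standalone sketch of the same pattern (the flag name is illustrative, not part of this patch):

import argparse

parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() in ("yes", "true", "t", "1"))
parser.add_argument('--use_openssl', type='bool', choices=[True, False], default=True)

print(parser.parse_args(["--use_openssl", "false"]).use_openssl)  # -> False
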
+ + # Create command line arguments + def createArguments(self): + trackers = [ + # by zeroseed at http://127.0.0.1:43110/19HKdTAeBh5nRiKn791czY7TwRB1QNrf1Q/?:users/1HvNGwHKqhj3ZMEM53tz6jbdqe4LRpanEu:zn:dc17f896-bf3f-4962-bdd4-0a470040c9c5 + "zero://k5w77dozo3hy5zualyhni6vrh73iwfkaofa64abbilwyhhd3wgenbjqd.onion:15441", + "zero://2kcb2fqesyaevc4lntogupa4mkdssth2ypfwczd2ov5a3zo6ytwwbayd.onion:15441", + "zero://my562dxpjropcd5hy3nd5pemsc4aavbiptci5amwxzbelmzgkkuxpvid.onion:15441", + "zero://pn4q2zzt2pw4nk7yidxvsxmydko7dfibuzxdswi6gu6ninjpofvqs2id.onion:15441", + "zero://6i54dd5th73oelv636ivix6sjnwfgk2qsltnyvswagwphub375t3xcad.onion:15441", + "zero://tl74auz4tyqv4bieeclmyoe4uwtoc2dj7fdqv4nc4gl5j2bwg2r26bqd.onion:15441", + "zero://wlxav3szbrdhest4j7dib2vgbrd7uj7u7rnuzg22cxbih7yxyg2hsmid.onion:15441", + "zero://zy7wttvjtsijt5uwmlar4yguvjc2gppzbdj4v6bujng6xwjmkdg7uvqd.onion:15441", + + # ZeroNet 0.7.2 defaults: + "zero://boot3rdez4rzn36x.onion:15441", + "zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY + "udp://tracker.coppersurfer.tk:6969", # DE + "udp://104.238.198.186:8000", # US/LA + "udp://retracker.akado-ural.ru:80", # RU + "http://h4.trakx.nibba.trade:80/announce", # US/VA + "http://open.acgnxtracker.com:80/announce", # DE + "http://tracker.bt4g.com:2095/announce", # Cloudflare + "zero://2602:ffc5::c5b2:5360:26312" # US/ATL + ] + # Platform specific + if sys.platform.startswith("win"): + coffeescript = "type %s | tools\\coffee\\coffee.cmd" + else: + coffeescript = None + + try: + language, enc = locale.getdefaultlocale() + language = language.lower().replace("_", "-") + if language not in ["pt-br", "zh-tw"]: + language = language.split("-")[0] + except Exception: + language = "en" + + use_openssl = True + + if repr(1483108852.565) != "1483108852.565": # Fix for weird Android issue + fix_float_decimals = True + else: + fix_float_decimals = False + + config_file = self.start_dir + "/zeronet.conf" + data_dir = self.start_dir + "/data" + log_dir = self.start_dir + "/log" + + ip_local = ["127.0.0.1", "::1"] + + # Main + action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)') + + # SiteCreate + action = self.subparsers.add_parser("siteCreate", help='Create a new site') + action.register('type', 'bool', self.strToBool) + action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True) + + # SiteNeedFile + action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site') + action.add_argument('address', help='Site address') + action.add_argument('inner_path', help='File inner path') + + # SiteDownload + action = self.subparsers.add_parser("siteDownload", help='Download a new site') + action.add_argument('address', help='Site address') + + # SiteSign + action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]') + action.add_argument('address', help='Site to sign') + action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?') + action.add_argument('--inner_path', help='File you want to sign (default: content.json)', + default="content.json", metavar="inner_path") + action.add_argument('--remove_missing_optional', help='Remove optional files that are not present in the directory', action='store_true') + action.add_argument('--publish', help='Publish site after signing', action='store_true') + + # SitePublish + action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address') + action.add_argument('address', help='Site to publish') + action.add_argument('peer_ip', help='Peer ip to publish (default: random peer ips from tracker)', + default=None, nargs='?') + action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)', + default=15441, nargs='?') + action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)', + default="content.json", metavar="inner_path") + + # SiteVerify + action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address') + action.add_argument('address', help='Site to verify') + + # SiteCmd + action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site') + action.add_argument('address', help='Site address') + action.add_argument('cmd', help='API command name') + action.add_argument('parameters', help='Parameters of the command', nargs='?') + + # dbRebuild + action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache') + action.add_argument('address', help='Site to rebuild') + + # dbQuery + action = self.subparsers.add_parser("dbQuery", help='Query site sql cache') + action.add_argument('address', help='Site to query') + action.add_argument('query', help='Sql query') + + # PeerPing + action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer') + action.add_argument('peer_ip', help='Peer ip') + action.add_argument('peer_port', help='Peer port', nargs='?') + + # PeerGetFile + action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer') + action.add_argument('peer_ip', help='Peer ip') + action.add_argument('peer_port', help='Peer port') + action.add_argument('site', help='Site address') + action.add_argument('filename', help='File name to request') + action.add_argument('--benchmark', help='Request the file 10 times, then display the total time', action='store_true') + + # PeerCmd + action = self.subparsers.add_parser("peerCmd", help='Execute a command on peer and print the response') + action.add_argument('peer_ip', help='Peer ip') + action.add_argument('peer_port', help='Peer port') + action.add_argument('cmd', help='Command to execute') + action.add_argument('parameters', help='Parameters to command', nargs='?') + + # CryptSign + action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key') + action.add_argument('message', help='Message to sign') + action.add_argument('privatekey', help='Private key') + + # Crypt Verify + action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address') + action.add_argument('message', help='Message to verify') + action.add_argument('sign', help='Signature for message') + action.add_argument('address', help='Signer\'s address') + + # Crypt GetPrivatekey + action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed') + action.add_argument('master_seed', help='Source master seed') + action.add_argument('site_address_index', help='Site address index', type=int) + + action = self.subparsers.add_parser("getConfig", help='Return json-encoded info') + action = self.subparsers.add_parser("testConnection", help='Testing') + action = self.subparsers.add_parser("testAnnounce", help='Testing')
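
[Editor's note] Each add_parser call above defines one CLI action; after parsing, the chosen action's positional arguments become attributes on the config object. A quick sketch of how an action is picked up (assumes running from the src directory so Config imports; the address is illustrative):

from Config import Config

config = Config(["zeronet.py", "siteVerify", "1HeLLoPVbqF3UEj8aWXErwTxrwkyjwGtZN"])
config.parse(parse_config=False)    # skip zeronet.conf for this demo
print(config.action)                # -> "siteVerify"
print(config.getActionArguments())  # -> {'address': '1HeLLoPVbqF3UEj8aWXErwTxrwkyjwGtZN'}
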
+ + self.test_parser = self.subparsers.add_parser("test", help='Run a test') + self.test_parser.add_argument('test_name', help='Test name', nargs="?") + # self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true') + + # Config parameters + self.parser.add_argument('--verbose', help='More detailed logging', action='store_true') + self.parser.add_argument('--debug', help='Debug mode', action='store_true') + self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true') + self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') + self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true') + + self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true') + + self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path") + self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path") + self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path") + + self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"]) + + self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path") + self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"]) + self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"]) + self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int) + + self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language') + self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip') + self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port') + self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*') + self.parser.add_argument('--ui_host', help='Allow access using these hosts', metavar='host', nargs='*') + self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true') + + self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', + nargs='?', const="default_browser", metavar='browser_name') + self.parser.add_argument('--homepage', help='Web interface homepage', default='1HeLLoPVbqF3UEj8aWXErwTxrwkyjwGtZN', + metavar='address') + self.parser.add_argument('--updatesite', help='Source code update site', default='1uPDaT3uSyWAPdCv1WkMb5hBQjWSNNACf', + metavar='address') + self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source') + + self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit') + self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit') + self.parser.add_argument('--connected_limit', help='Max number of connected peers per site. Soft limit.', default=10, type=int, metavar='connected_limit') + self.parser.add_argument('--global_connected_limit', help='Max number of connections. Soft limit.', default=512, type=int, metavar='global_connected_limit')
+ self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers') + + self.parser.add_argument('--site_announce_interval_min', help='Site announce interval for the most active sites, in minutes.', default=4, type=int, metavar='site_announce_interval_min') + self.parser.add_argument('--site_announce_interval_max', help='Site announce interval for inactive sites, in minutes.', default=30, type=int, metavar='site_announce_interval_max') + + self.parser.add_argument('--site_peer_check_interval_min', help='Connectable peers check interval for the most active sites, in minutes.', default=5, type=int, metavar='site_peer_check_interval_min') + self.parser.add_argument('--site_peer_check_interval_max', help='Connectable peers check interval for inactive sites, in minutes.', default=20, type=int, metavar='site_peer_check_interval_max') + + self.parser.add_argument('--site_update_check_interval_min', help='Site update check interval for the most active sites, in minutes.', default=5, type=int, metavar='site_update_check_interval_min') + self.parser.add_argument('--site_update_check_interval_max', help='Site update check interval for inactive sites, in minutes.', default=45, type=int, metavar='site_update_check_interval_max') + + self.parser.add_argument('--site_connectable_peer_count_max', help='Search for up to this many connectable peers for the most active sites', default=10, type=int, metavar='site_connectable_peer_count_max') + self.parser.add_argument('--site_connectable_peer_count_min', help='Search for at least this many connectable peers for inactive sites', default=2, type=int, metavar='site_connectable_peer_count_min') + + self.parser.add_argument('--send_back_lru_size', help='Size of the send back LRU cache', default=5000, type=int, metavar='send_back_lru_size') + self.parser.add_argument('--send_back_limit', help='Send back no more than this many files at once to a peer when we discover it holds older file versions', default=3, type=int, metavar='send_back_limit') + + self.parser.add_argument('--expose_no_ownership', help='By default, ZeroNet checks for updates to its own sites more frequently. A third party could use this to reveal the network address of a site owner. If this option is enabled, ZeroNet performs the checks in the same way for all sites.', type='bool', choices=[True, False], default=False) + + self.parser.add_argument('--simultaneous_connection_throttle_threshold', help='Throttle opening new connections when the number of outgoing connections in a not yet fully established state exceeds the threshold.', default=15, type=int, metavar='simultaneous_connection_throttle_threshold') + + self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip') + self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port') + self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port') + self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"]) + self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, metavar='ip', nargs='*') + self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*') + self.parser.add_argument('--offline', help='Disable network communication', action='store_true') + + self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true') + self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') + self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip') + self.parser.add_argument('--trackers', help='Bootstrapping torrent trackers', default=trackers, metavar='protocol://address', nargs='*') + self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*') + self.parser.add_argument('--trackers_proxy', help='Force using a proxy to connect to trackers (disable, tor, ip:port)', default="disable") + self.parser.add_argument('--use_libsecp256k1', help='Use libsecp256k1 library for speedup', type='bool', choices=[True, False], default=True) + self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup', type='bool', choices=[True, False], default=True) + self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path") + self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path") + self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true') + self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true') + self.parser.add_argument('--force_encryption', help="Enforce encryption for all peer connections", action='store_true') + self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory', + type='bool', choices=[True, False], default=True) + self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true') + self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup', + default=2048, type=int, metavar='limit') + self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size') + self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)', + type='bool', choices=[True, False], default=False)
+ self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)', + type='bool', choices=[True, False], default=False) + self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power', + type='bool', choices=[True, False], default=False) + self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification', + type='bool', choices=[True, False], default=fix_float_decimals) + self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed") + + self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int) + self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int) + self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int) + self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int) + + self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual") + + self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for development', default=coffeescript, + metavar='executable_path') + + self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable') + self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051') + self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050') + self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password') + self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true') + self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10) + self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='port', type=int, default=15441) + + self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev)) + self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true') + + return self.parser + + def loadTrackersFile(self): + if not self.trackers_file: + return None + + self.trackers = self.arguments.trackers[:] + + for trackers_file in self.trackers_file: + try: + if trackers_file.startswith("/"): # Absolute + trackers_file_path = trackers_file + elif trackers_file.startswith("{data_dir}"): # Relative to data_dir + trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir) + else: # Relative to zeronet.py + trackers_file_path = self.start_dir + "/" + trackers_file + + for line in open(trackers_file_path): + tracker = line.strip() + if "://" in tracker and tracker not in self.trackers: + self.trackers.append(tracker) + except Exception as err: + print("Error loading trackers file: %s" % err) + + # Find arguments specified for current action + def getActionArguments(self): + back = {} + arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # Skip the first action (the auto-generated help) + for argument in arguments: + back[argument.dest] = getattr(self, argument.dest) + return back
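
[Editor's note] getAction (below) scans argv for a known subcommand, and moveUnknownToEnd shifts unrecognized "--" options (typically plugin-defined ones) behind the known arguments so argparse does not stop on them. A worked sketch (the plugin flag name is illustrative):

from Config import Config

config = Config(["zeronet.py", "--illustrative_plugin_flag", "1", "--debug", "main"])
print(config.getAction(config.argv))  # -> "main"
print(config.moveUnknownToEnd(config.argv, "main"))
# -> ['zeronet.py', '--debug', 'main', '--illustrative_plugin_flag', '1']
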
+ + # Try to find action from argv + def getAction(self, argv): + actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions + found_action = False + for action in actions: # See if any in argv + if action in argv: + found_action = action + break + return found_action + + # Move plugin parameters to end of argument list + def moveUnknownToEnd(self, argv, default_action): + valid_actions = sum([action.option_strings for action in self.parser._actions], []) + valid_parameters = [] + plugin_parameters = [] + plugin = False + for arg in argv: + if arg.startswith("--"): + if arg not in valid_actions: + plugin = True + else: + plugin = False + elif arg == default_action: + plugin = False + + if plugin: + plugin_parameters.append(arg) + else: + valid_parameters.append(arg) + return valid_parameters + plugin_parameters + + def getParser(self, argv): + action = self.getAction(argv) + if not action: + return self.parser + else: + return self.subparsers.choices[action] + + # Parse arguments from config file and command line + def parse(self, silent=False, parse_config=True): + argv = self.argv[:] # Copy command line arguments + current_parser = self.getParser(argv) + if silent: # Don't display messages or quit on unknown parameter + original_print_message = self.parser._print_message + original_exit = self.parser.exit + + def silencer(parser, function_name): + parser.exited = True + return None + current_parser.exited = False + current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message") + current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit") + + self.parseCommandline(argv, silent) # Parse argv + self.setAttributes() + if parse_config: + argv = self.parseConfig(argv) # Add arguments from config file + + self.parseCommandline(argv, silent) # Parse argv + self.setAttributes() + + if not silent: + if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local: + self.ip_local.append(self.fileserver_ip) + + if silent: # Restore original functions + if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action + self.action = None + current_parser._print_message = original_print_message + current_parser.exit = original_exit + + self.loadTrackersFile() + + # Parse command line arguments + def parseCommandline(self, argv, silent=False): + # Find out if action is specified on start + action = self.getAction(argv) + if not action: + argv.append("--end") + argv.append("main") + action = "main" + argv = self.moveUnknownToEnd(argv, action) + if silent: + res = self.parser.parse_known_args(argv[1:]) + if res: + self.arguments = res[0] + else: + self.arguments = {} + else: + self.arguments = self.parser.parse_args(argv[1:]) + + # Parse config file + def parseConfig(self, argv): + # Find config file path from parameters + if "--config_file" in argv: + self.config_file = argv[argv.index("--config_file") + 1] + # Load config file + if os.path.isfile(self.config_file): + config = configparser.RawConfigParser(allow_no_value=True, strict=False) + config.read(self.config_file) + for section in config.sections(): + for key, val in config.items(section): + if val == "True": + val = None + if section != "global": # If not global, prefix the key with the section name + key = section + "_" + key + + if key == "open_browser": # Prefer config file value over cli argument + while "--%s" % key in argv: + pos = argv.index("--open_browser") + del argv[pos:pos + 2] + + argv_extend = ["--%s" %
key] + if val: + for line in val.strip().split("\n"): # Allow multi-line values + argv_extend.append(line) + if "\n" in val: + argv_extend.append("--end") + + argv = argv[:1] + argv_extend + argv[1:] + return argv + + # Return command line value of given argument + def getCmdlineValue(self, key): + if key not in self.argv: + return None + argv_index = self.argv.index(key) + if argv_index == len(self.argv) - 1: # last arg, test not specified + return None + + return self.argv[argv_index + 1] + + # Expose arguments as class attributes + def setAttributes(self): + # Set attributes from arguments + if self.arguments: + args = vars(self.arguments) + for key, val in args.items(): + if type(val) is list: + val = val[:] + if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"): + if val: + val = val.replace("\\", "/") + setattr(self, key, val) + + def loadPlugins(self): + from Plugin import PluginManager + + @PluginManager.acceptPlugins + class ConfigPlugin(object): + def __init__(self, config): + self.argv = config.argv + self.parser = config.parser + self.subparsers = config.subparsers + self.test_parser = config.test_parser + self.getCmdlineValue = config.getCmdlineValue + self.createArguments() + + def createArguments(self): + pass + + ConfigPlugin(self) + + def saveValue(self, key, value): + if not os.path.isfile(self.config_file): + content = "" + else: + content = open(self.config_file).read() + lines = content.splitlines() + + global_line_i = None + key_line_i = None + i = 0 + for line in lines: + if line.strip() == "[global]": + global_line_i = i + if line.startswith(key + " =") or line == key: + key_line_i = i + i += 1 + + if key_line_i and len(lines) > key_line_i + 1: + while True: # Delete previous multiline values + is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t") + if not is_value_line: + break + del lines[key_line_i + 1] + + if value is None: # Delete line + if key_line_i: + del lines[key_line_i] + + else: # Add / update + if type(value) is list: + value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value] + else: + value_lines = [str(value).replace("\n", "").replace("\r", "")] + new_line = "%s = %s" % (key, "\n ".join(value_lines)) + if key_line_i: # Already in the config, change the line + lines[key_line_i] = new_line + elif global_line_i is None: # No global section yet, append to end of file + lines.append("[global]") + lines.append(new_line) + else: # Has global section, append the line after it + lines.insert(global_line_i + 1, new_line) + + open(self.config_file, "w").write("\n".join(lines)) + + def getServerInfo(self): + from Plugin import PluginManager + import main + + info = { + "platform": sys.platform, + "fileserver_ip": self.fileserver_ip, + "fileserver_port": self.fileserver_port, + "ui_ip": self.ui_ip, + "ui_port": self.ui_port, + "version": self.version, + "rev": self.rev, + "language": self.language, + "debug": self.debug, + "plugins": PluginManager.plugin_manager.plugin_names, + + "log_dir": os.path.abspath(self.log_dir), + "data_dir": os.path.abspath(self.data_dir), + "src_dir": os.path.dirname(os.path.abspath(__file__)) + } + + try: + info["ip_external"] = main.file_server.port_opened + info["tor_enabled"] = main.file_server.tor_manager.enabled + info["tor_status"] = main.file_server.tor_manager.status + except Exception: + pass + + return info + + def initConsoleLogger(self): + if self.action == "main": + format = '[%(asctime)s] %(name)s %(message)s' + 
else: + format = '%(name)s %(message)s' + + if self.console_log_level == "default": + if self.silent: + level = logging.ERROR + elif self.debug: + level = logging.DEBUG + else: + level = logging.INFO + else: + level = logging.getLevelName(self.console_log_level) + + console_logger = logging.StreamHandler() + console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S")) + console_logger.setLevel(level) + logging.getLogger('').addHandler(console_logger) + + def initFileLogger(self): + if self.action == "main": + log_file_path = "%s/debug.log" % self.log_dir + else: + log_file_path = "%s/cmd.log" % self.log_dir + + if self.log_rotate == "off": + file_logger = logging.FileHandler(log_file_path, "w", "utf-8") + else: + when_names = {"weekly": "w", "daily": "d", "hourly": "h"} + file_logger = logging.handlers.TimedRotatingFileHandler( + log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count, + encoding="utf8" + ) + + if os.path.isfile(log_file_path): + file_logger.doRollover() # Always start with empty log file + file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s')) + file_logger.setLevel(logging.getLevelName(self.log_level)) + logging.getLogger('').setLevel(logging.getLevelName(self.log_level)) + logging.getLogger('').addHandler(file_logger) + + def initLogging(self, console_logging=None, file_logging=None): + if console_logging == None: + console_logging = self.console_log_level != "off" + + if file_logging == None: + file_logging = self.log_level != "off" + + # Create necessary files and dirs + if not os.path.isdir(self.log_dir): + os.mkdir(self.log_dir) + try: + os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + except Exception as err: + print("Can't change permission of %s: %s" % (self.log_dir, err)) + + # Make warning hidden from console + logging.WARNING = 15 # Don't display warnings if not in debug mode + logging.addLevelName(15, "WARNING") + + logging.getLogger('').name = "-" # Remove root prefix + + self.error_logger = ErrorLogHandler() + self.error_logger.setLevel(logging.getLevelName("ERROR")) + logging.getLogger('').addHandler(self.error_logger) + + if console_logging: + self.initConsoleLogger() + if file_logging: + self.initFileLogger() + + +class ErrorLogHandler(logging.StreamHandler): + def __init__(self): + self.lines = [] + return super().__init__() + + def emit(self, record): + self.lines.append([time.time(), record.levelname, self.format(record)]) + + def onNewRecord(self, record): + pass + + +config = Config(sys.argv) diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py new file mode 100644 index 00000000..e73f9be1 --- /dev/null +++ b/src/Translate/Translate.py @@ -0,0 +1,135 @@ +import os +import json +import logging +import inspect +import re +import html +import string + +from Config import config + +translates = [] + + +class EscapeProxy(dict): + # Automatically escape the accessed string values + def __getitem__(self, key): + val = dict.__getitem__(self, key) + if type(val) in (str, str): + return html.escape(val) + elif type(val) is dict: + return EscapeProxy(val) + elif type(val) is list: + return EscapeProxy(enumerate(val)) # Convert lists to dict + else: + return val + + +class Translate(dict): + def __init__(self, lang_dir=None, lang=None): + if not lang_dir: + lang_dir = os.path.dirname(__file__) + "/languages/" + if not lang: + lang = config.language + self.lang = lang + self.lang_dir = lang_dir + self.setLanguage(lang) + 
self.formatter = string.Formatter() + + if config.debug: + # Auto reload translate files on change + from Debug import DebugReloader + DebugReloader.watcher.addCallback(self.load) + + translates.append(self) + + def setLanguage(self, lang): + self.lang = re.sub("[^a-z-]", "", lang) + self.lang_file = self.lang_dir + "%s.json" % lang + self.load() + + def __repr__(self): + return "<Translate %s>" % self.lang + + def load(self): + if self.lang == "en": + data = {} + dict.__init__(self, data) + self.clear() + elif os.path.isfile(self.lang_file): + try: + data = json.load(open(self.lang_file, encoding="utf8")) + logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data))) + except Exception as err: + logging.error("Error loading translate file %s: %s" % (self.lang_file, err)) + data = {} + dict.__init__(self, data) + else: + data = {} + dict.__init__(self, data) + self.clear() + logging.debug("Translate file does not exist: %s" % self.lang_file) + + def format(self, s, kwargs, nested=False): + kwargs["_"] = self + if nested: + back = self.formatter.vformat(s, [], kwargs) # PY3 TODO: Change to format_map + return self.formatter.vformat(back, [], kwargs) + else: + return self.formatter.vformat(s, [], kwargs) + + def formatLocals(self, s, nested=False): + kwargs = inspect.currentframe().f_back.f_locals + return self.format(s, kwargs, nested=nested) + + def __call__(self, s, kwargs=None, nested=False, escape=True): + if not kwargs: + kwargs = inspect.currentframe().f_back.f_locals + if escape: + kwargs = EscapeProxy(kwargs) + return self.format(s, kwargs, nested=nested) + + def __missing__(self, key): + return key + + def pluralize(self, value, single, multi): + if value > 1: + return self[multi].format(value) + else: + return self[single].format(value) + + def translateData(self, data, translate_table=None, mode="js"): + if not translate_table: + translate_table = self + + patterns = [] + for key, val in list(translate_table.items()): + if key.startswith("_("): # Problematic string: only match when it appears inside the _(" ") function + key = key.replace("_(", "").replace(")", "").replace(", ", '", "') + translate_table[key] = "|" + val + patterns.append(re.escape(key)) + + def replacer(match): + target = translate_table[match.group(1)] + if mode == "js": + if target and target[0] == "|": # Strict string match + if match.string[match.start() - 2] == "_": # Only if the match appears inside the _(" ") function + return '"' + target[1:] + '"' + else: + return '"' + match.group(1) + '"' + return '"' + target + '"' + else: + return match.group(0)[0] + target + match.group(0)[-1] + + if mode == "html": + pattern = '[">](' + "|".join(patterns) + ')["<]' + else: + pattern = '"(' + "|".join(patterns) + ')"' + data = re.sub(pattern, replacer, data) + + if mode == "html": + data = data.replace("lang={lang}", "lang=%s" % self.lang) # Add the lang get parameter to .js urls to avoid caching + + return data + +translate = Translate() diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py new file mode 100644 index 00000000..ba0ab6d4 --- /dev/null +++ b/src/Translate/__init__.py @@ -0,0 +1 @@ +from .Translate import * \ No newline at end of file diff --git a/src/Translate/languages/da.json b/src/Translate/languages/da.json new file mode 100644 index 00000000..8e6f0845 --- /dev/null +++ b/src/Translate/languages/da.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.<br>
You are a full member of the ZeroNet network!": "Tillykke, din port ({0}) er åben.<br>
Du er nu fuld klient på ZeroNet!", + "Tor mode active, every connection using Onion route.": "TOR er aktiv, alle forbindelser anvender Onions.", + "Successfully started Tor onion hidden services.": "OK. Startede TOR skjult onion service.", + "Unable to start hidden services, please check your config.": "Fejl. Kunne ikke starte TOR skjult onion service. Tjek din opsætning!", + "For faster connections open {0} port on your router.": "Åben port {0} på din router for hurtigere forbindelse.", + "Your connection is restricted. Please, open {0} port on your router": "Begrænset forbindelse. Åben venligst port {0} på din router", + "or configure Tor to become a full member of the ZeroNet network.": "eller opsæt TOR for fuld adgang til ZeroNet!", + + "Select account you want to use in this site:": "Vælg bruger til brug på denne side:", + "currently selected": "nuværende bruger", + "Unique to site": "Unik på siden", + + "Content signing failed": "Signering af indhold fejlede", + "Content publish queued for {0:.0f} seconds.": "Indhold i kø for offentliggørelse i {0:.0f} sekunder.", + "Content published to {0} peers.": "Indhold offentliggjort til {0} klienter.", + "No peers found, but your content is ready to access.": "Ingen klienter fundet, men dit indhold er klar til hentning.", + "Your network connection is restricted. Please, open {0} port": "Din forbindelse er begrænset. Åben venligst port {0}", + "on your router to make your site accessible for everyone.": "på din router for at dele din side med alle.", + "Content publish failed.": "Offentliggørelse af indhold fejlede.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Denne fil er endnu ikke delt færdig. Tidligere indhold kan gå tabt hvis du skriver til filen nu.", + "Write content anyway": "Del indhold alligevel", + "New certificate added:": "Nyt certifikat oprettet:", + "You current certificate:": "Dit nuværende certifikat: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Skift certificat til {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certifikat ændret til {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Side klonet", + + "You have successfully changed the web interface's language!": "OK. Du har nu skiftet sprog på web brugergrænsefladen!", + "Due to the browser's caching, the full transformation could take some minute.": "Pga. browser cache kan skift af sprog tage nogle minutter.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Forbindelse til UiServer Websocket blev tabt. Genopretter forbindelse...", + "Connection with UiServer Websocket recovered.": "Forbindelse til UiServer Websocket genoprettet.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket fejl. 
Genindlæs venligst siden (F5)!", + "   Connecting...": "   Opretter forbindelse...", + "Site size: ": "Side størrelse: ", + "MB is larger than default allowed ": "MB er større end den tilladte default ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Åben side og sæt max side størrelse til \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " filer skal downloades", + " downloaded": " downloadet", + " download failed": " download fejlede", + "Peers found: ": "Klienter fundet: ", + "No peers found": "Ingen klienter fundet", + "Running out of size limit (": "Siden fylder snart for meget (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Ret max side størrelse til \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Max side størrelse ændret til {0}MB", + " New version of this page has just released.
Reload to see the modified content.": " Ny version af denne side er blevet offentliggjort.<br>
Genindlæs venligst siden (F5) for at se nyt indhold!", + "This site requests permission:": "Denne side beder om tilladelse:", + "_(Accept)": "Tillad" + +}
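
[Editor's note] Each language file in this patch is a flat JSON map from the English source string to its translation; Translate loads it as a plain dict, and __missing__ makes unknown keys fall back to themselves. A lookup sketch (assumes ZeroNet's src directory on sys.path and the repo root as working directory):

from Config import config
config.parse(silent=True)           # zeronet.py normally does this at startup
from Translate import Translate

_ = Translate(lang_dir="src/Translate/languages/", lang="da")
print(_["No peers found"])          # -> "Ingen klienter fundet"
print(_["Not translated text"])     # unknown key falls back to itself
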
diff --git a/src/Translate/languages/de.json b/src/Translate/languages/de.json new file mode 100644 index 00000000..1cc63b74 --- /dev/null +++ b/src/Translate/languages/de.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.<br>You are a full member of the ZeroNet network!": "Gratulation, dein Port {0} ist offen.<br>
Du bist ein volles Mitglied des ZeroNet Netzwerks!", + "Tor mode active, every connection using Onion route.": "Tor Modus aktiv, jede Verbindung nutzt die Onion Route.", + "Successfully started Tor onion hidden services.": "Tor versteckte Dienste erfolgreich gestartet.", + "Unable to start hidden services, please check your config.": "Nicht möglich versteckte Dienste zu starten.", + "For faster connections open {0} port on your router.": "Für schnellere Verbindungen, öffne Port {0} auf deinem Router.", + "Your connection is restricted. Please, open {0} port on your router": "Deine Verbindung ist eingeschränkt. Bitte öffne Port {0} auf deinem Router", + "or configure Tor to become a full member of the ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.", + + "Select account you want to use in this site:": "Wähle das Konto, das du auf dieser Seite benutzen willst:", + "currently selected": "aktuell ausgewählt", + "Unique to site": "Eindeutig zur Seite", + + "Content signing failed": "Signierung des Inhalts fehlgeschlagen", + "Content publish queued for {0:.0f} seconds.": "Veröffentlichung des Inhalts um {0:.0f} Sekunden verzögert.", + "Content published to {0} peers.": "Inhalt zu {0} Peers veröffentlicht.", + "No peers found, but your content is ready to access.": "Keine Peers gefunden, aber dein Inhalt ist bereit zum Zugriff.", + "Your network connection is restricted. Please, open {0} port": "Deine Netzwerkverbindung ist beschränkt. Bitte öffne Port {0}", + "on your router to make your site accessible for everyone.": "auf deinem Router um deine Seite für Jeden zugänglich zu machen.", + "Content publish failed.": "Inhalt konnte nicht veröffentlicht werden.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei wird noch synchronisiert. Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.", + "Write content anyway": "Inhalt trotzdem schreiben", + "New certificate added:": "Neues Zertifikat hinzugefügt:", + "You current certificate:": "Dein aktuelles Zertifikat:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Ändere es zu {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Zertifikat geändert zu: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Seite geklont", + + "You have successfully changed the web interface's language!": "Du hast die Sprache des Webinterface erfolgreich geändert!", + "Due to the browser's caching, the full transformation could take some minute.": "Aufgrund des Browsercaches kann die volle Transformation Minuten dauern.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Die Verbindung mit UiServer Websocketist abgebrochen. 
Neu verbinden...", + "Connection with UiServer Websocket recovered.": "Die Verbindung mit UiServer Websocket wurde wiederhergestellt.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket Fehler, bitte Seite neu laden.", + "   Connecting...": "   Verbinden...", + "Site size: ": "Seitengröße: ", + "MB is larger than default allowed ": "MB ist größer als der erlaubte Standart", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das Limit auf \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " Dateien müssen noch heruntergeladen werden", + " downloaded": " heruntergeladen", + " download failed": " Herunterladen fehlgeschlagen", + "Peers found: ": "Peers gefunden: ", + "No peers found": "Keine Peers gefunden", + "Running out of size limit (": "Das Speicherlimit ist bald ausgeschöpft (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Limit auf \" + site_info.next_size_limit + \"MB ändern", + "Site size limit changed to {0}MB": "Speicherlimit für diese Seite auf {0}MB geändert", + " New version of this page has just released.
Reload to see the modified content.": " Neue Version dieser Seite wurde gerade veröffentlicht.<br>
Lade die Seite neu, um den geänderten Inhalt zu sehen.", + "This site requests permission:": "Diese Seite fordert Rechte an:", + "_(Accept)": "Genehmigen" + +}
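
[Editor's note] Translation values may carry str.format placeholders such as {0} or {auth_type}. When the Translate object is called like a function, it formats the given string with the caller's local variables, HTML-escaping them through EscapeProxy, and {_[...]} fields resolve through the translation table itself. A sketch reusing the `_` object from the previous example (the variable name is illustrative):

user_name = "<b>bob</b>"          # markup arriving via a local variable
print(_("Hi, {user_name}!"))      # -> "Hi, &lt;b&gt;bob&lt;/b&gt;!" (auto-escaped)
print(_("{_[No peers found]}"))   # nested lookup -> "Ingen klienter fundet"
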
diff --git a/src/Translate/languages/es.json b/src/Translate/languages/es.json new file mode 100644 index 00000000..4cac077b --- /dev/null +++ b/src/Translate/languages/es.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.<br>You are a full member of the ZeroNet network!": "¡Felicidades! Tu puerto {0} está abierto.<br>
¡Eres un miembro completo de la red Zeronet!", + "Tor mode active, every connection using Onion route.": "Modo Tor activado, cada conexión usa una ruta Onion.", + "Successfully started Tor onion hidden services.": "Tor ha iniciado satisfactoriamente la ocultación de los servicios onion.", + "Unable to start hidden services, please check your config.": "No se puedo iniciar los servicios ocultos, por favor comprueba tu configuración.", + "For faster connections open {0} port on your router.": "Para conexiones más rápidas abre el puerto {0} en tu router.", + "Your connection is restricted. Please, open {0} port on your router": "Tu conexión está limitada. Por favor, abre el puerto {0} en tu router", + "or configure Tor to become a full member of the ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.", + + "Select account you want to use in this site:": "Selecciona la cuenta que quieres utilizar en este sitio:", + "currently selected": "actualmente seleccionada", + "Unique to site": "Única para el sitio", + + "Content signing failed": "Firma del contenido fallida", + "Content publish queued for {0:.0f} seconds.": "Publicación de contenido en cola durante {0:.0f} segundos.", + "Content published to {0} peers.": "Contenido publicado para {0} pares.", + "No peers found, but your content is ready to access.": "No se ha encontrado pares, pero tu contenido está listo para ser accedido.", + "Your network connection is restricted. Please, open {0} port": "Tu conexión de red está restringida. Por favor, abre el puerto{0}", + "on your router to make your site accessible for everyone.": "en tu router para hacer tu sitio accesible a todo el mundo.", + "Content publish failed.": "Publicación de contenido fallida.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Este archivo está aún sincronizado, si le escribes ahora el contenido previo podría perderse.", + "Write content anyway": "Escribir el contenido de todas formas", + "New certificate added:": "Nuevo certificado añadido:", + "You current certificate:": "Tu certificado actual:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambia esto a {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificado cambiado a: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Sitio clonado", + + "You have successfully changed the web interface's language!": "¡Has cambiado con éxito el idioma de la interfaz web!", + "Due to the browser's caching, the full transformation could take some minute.": "Debido a la caché del navegador, la transformación completa podría llevar unos minutos.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Se perdió la conexión con UiServer Websocket. 
Reconectando...", + "Connection with UiServer Websocket recovered.": "Conexión con UiServer Websocket recuperada.", + "UiServer Websocket error, please reload the page.": "Error de UiServer Websocket, por favor recarga la página.", + "   Connecting...": "   Conectando...", + "Site size: ": "Tamaño del sitio: ", + "MB is larger than default allowed ": "MB es más grande de lo permitido por defecto", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abre tu sitio and establece el límite de tamaño a \" + site_info.next_size_limit + \"MBs", + " files needs to be downloaded": " Los archivos necesitan ser descargados", + " downloaded": " descargados", + " download failed": " descarga fallida", + "Peers found: ": "Pares encontrados: ", + "No peers found": "No se han encontrado pares", + "Running out of size limit (": "Superando el tamaño límite (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Establece ellímite a \" + site_info.next_size_limit + \"MB ändern", + "Site size limit changed to {0}MB": "Límite de tamaño del sitio cambiado a {0}MBs", + " New version of this page has just released.
Reload to see the modified content.": " Se ha publicado una nueva versión de esta página .
Recarga para ver el contenido modificado.", + "This site requests permission:": "Este sitio solicita permiso:", + "_(Accept)": "Conceder" + +} diff --git a/src/Translate/languages/fa.json b/src/Translate/languages/fa.json new file mode 100644 index 00000000..e644247a --- /dev/null +++ b/src/Translate/languages/fa.json @@ -0,0 +1,50 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "تبریک، درگاه {0} شما باز شده است.
شما یک عضو تمام شبکه ZeroNet هستید!", + "Tor mode active, every connection using Onion route.": "حالت Tor فعال است، هر ارتباط از مسیریابی پیاز (Onion) استفاده می‌کند.", + "Successfully started Tor onion hidden services.": "خدمات پنهان پیاز (Onion) Tor با موفقیت راه‌اندازی شد.", + "Unable to start hidden services, please check your config.": "قادر به راه‌اندازی خدمات پنهان نیستیم، لطفا تنظیمات خود را بررسی نمایید.", + "For faster connections open {0} port on your router.": "برای ارتباطات سریعتر درگاه {0} را بر روی مسیریاب (روتر) خود باز نمایید.", + "Your connection is restricted. Please, open {0} port on your router": "ارتباط شما محدود‌شده است. لطفا درگاه {0} را در مسیریاب (روتر) خود باز نمایید", + "or configure Tor to become a full member of the ZeroNet network.": "یا پیکربندی Tor را انجام دهید تا به یک عضو تمام شبکه ZeroNet تبدیل شوید.", + + "Select account you want to use in this site:": "حسابی را که می‌خواهید در این سایت استفاده کنید، انتخاب کنید:", + "currently selected": "در حال حاضر انتخاب‌شده", + "Unique to site": "مختص به سایت", + + "Content signing failed": "امضای محتوا با شکست مواجه شد", + "Content publish queued for {0:.0f} seconds.": "محتوا در صف انتشار با {0:.0f} ثانیه تاخیر قرار گرفت.", + "Content published to {0} peers.": "محتوا برای {0} همتا انتشار یافت.", + "No peers found, but your content is ready to access.": "همتایی یافت نشد، اما محتوای شما آماده دسترسی است.", + "Your network connection is restricted. Please, open {0} port": "ارتباط شبکه شما محدود‌شده است. لطفا درگاه {0} را", + "on your router to make your site accessible for everyone.": "در مسیریاب (روتر) خود باز کنید تا سایت خود را برای همه در دسترس قرار دهید.", + "Content publish failed.": "انتشار محتوا موفق نبود.", + "This file still in sync, if you write it now, then the previous content may be lost.": "این فایل همچنان همگام است، اگر شما آن را بنویسید، ممکن است محتوای قبلی از‌بین رود.", + "Write content anyway": "در هر صورت محتوا را بنویس", + "New certificate added:": "گواهی جدیدی افزوده شد:", + "You current certificate:": "گواهی فعلی شما:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "تغییرش بده به {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "گواهینامه به: {auth_type}/{auth_user_name}@{domain} تغییر پیدا کرد.", + "Site cloned": "سایت همسان‌سازی شد", + + "You have successfully changed the web interface's language!": "شما با موفقیت زبان رابط وب را تغییر دادید!", + "Due to the browser's caching, the full transformation could take some minute.": "به دلیل ذخیره‌سازی در مرور‌گر، امکان دارد تغییر شکل کامل چند دقیقه طول بکشد.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "اتصال با UiServer Websocket قطع شد. 
اتصال دوباره...", + "Connection with UiServer Websocket recovered.": "ارتباط با UiServer Websocket دوباره بر‌قرار شد.", + "UiServer Websocket error, please reload the page.": "خطای UiServer Websocket, لطفا صفحه را دوباره بارگیری کنید.", + "   Connecting...": "   برقراری ارتباط...", + "Site size: ": "حجم سایت: ", + "MB is larger than default allowed ": "MB بیشتر از پیش‌فرض مجاز است ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "سایت را باز کرده و محدوده حجم را به \" + site_info.next_size_limit + \"MB تنظیم کن", + " files needs to be downloaded": " فایل‌هایی که نیاز است، دانلود شوند", + " downloaded": " دانلود شد", + " download failed": " دانلود موفق نبود", + "Peers found: ": "چند همتا یافت شد: ", + "No peers found": "همتایی یافت نشد", + "Running out of size limit (": "عبور کرده از محدوده حجم (", + "Set limit to \" + site_info.next_size_limit + \"MB": "محدوده را به \" + site_info.next_size_limit + \"MB تنظیم کن", + "Site size limit changed to {0}MB": "محدوده حجم سایت به {0}MB تغییر کرد", + " New version of this page has just released.
Reload to see the modified content.": " نسخه جدیدی از این صفحه منتشر شده است.
برای مشاهده محتوای تغییر‌یافته دوباره بارگیری نمایید.", + "This site requests permission:": "این سایت درخواست مجوز می‌کند:", + "_(Accept)": "_(پذیرفتن)" +} diff --git a/src/Translate/languages/fr.json b/src/Translate/languages/fr.json new file mode 100644 index 00000000..b46ef2c3 --- /dev/null +++ b/src/Translate/languages/fr.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Félicitations, le port ({0}) est ouvert.
Vous êtes maintenant membre à part entière du réseau ZeroNet !", + "Tor mode active, every connection using Onion route.": "Tor actif, toutes les connexions utilisent un routage Onion.", + "Successfully started Tor onion hidden services.": "Tor activé avec succès.", + "Unable to start hidden services, please check your config.": "Impossible d'activer Tor, veuillez vérifier votre configuration.", + "For faster connections open {0} port on your router.": "Pour une meilleure connectivité, ouvrez le port {0} sur votre routeur.", + "Your connection is restricted. Please, open {0} port on your router": "Connectivité limitée. Veuillez ouvrir le port {0} sur votre routeur", + "or configure Tor to become a full member of the ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.", + + "Select account you want to use in this site:": "Sélectionnez le compte que vous voulez utiliser pour ce site :", + "currently selected": "présentement sélectionné", + "Unique to site": "Unique au site", + + "Content signing failed": "Échec à la signature du contenu", + "Content publish queued for {0:.0f} seconds.": "Publication du contenu différée de {0:.0f} secondes.", + "Content published to {0} peers.": "Contenu publié à {0} pairs.", + "No peers found, but your content is ready to access.": "Aucun pair trouvé, mais votre contenu est accessible.", + "Your network connection is restricted. Please, open {0} port": "Connectivité limitée. Veuillez ouvrir le port {0}", + "on your router to make your site accessible for everyone.": "sur votre routeur pour que votre site soit accessible à tous.", + "Content publish failed.": "Échec de la publication du contenu.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ce fichier n'est pas à jour, si vous le modifiez maintenant une version antérieure pourrait être perdue.", + "Write content anyway": "Enregistrer quand même", + "New certificate added:": "Nouveau certificat ajouté :", + "You current certificate:": "Votre certificat actuel :", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Changer pour {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificat changé pour : {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Site cloné", + + "You have successfully changed the web interface's language!": "Vous avez modifié la langue d'affichage avec succès !", + "Due to the browser's caching, the full transformation could take some minute.": "En fonction du cache du navigateur, la modification pourrait prendre quelques minutes.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Connexion avec UiServer Websocket rompue. 
Reconnexion...", + "Connection with UiServer Websocket recovered.": "Connexion avec UiServer Websocket rétablie.", + "UiServer Websocket error, please reload the page.": "Erreur du UiServer Websocket, veuillez recharger la page.", + "   Connecting...": "   Connexion...", + "Site size: ": "Taille du site : ", + "MB is larger than default allowed ": "MB est plus large que la taille permise par défaut ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Ouvrez le site et augmentez la taille maximale à \" + site_info.next_size_limit + \"MB-ra", + " files needs to be downloaded": " fichiers doivent être téléchargés", + " downloaded": " téléchargés", + " download failed": " échec de téléchargement", + "Peers found: ": "Pairs trouvés: ", + "No peers found": "Aucun pair trouvé", + "Running out of size limit (": "Vous approchez la taille maximale (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Augmentez la taille maximale à \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Taille maximale du site changée à {0}MB", + " New version of this page has just released.
Reload to see the modified content.": " Une nouvelle version de cette page vient d'être publiée.
Rechargez pour voir les modifications.", + "This site requests permission:": "Ce site requiert une permission :", + "_(Accept)": "Autoriser" + +} diff --git a/src/Translate/languages/hu.json b/src/Translate/languages/hu.json new file mode 100644 index 00000000..f9487f1d --- /dev/null +++ b/src/Translate/languages/hu.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Gratulálunk, a portod ({0}) nyitva van.
Teljes értékű tagja vagy a hálózatnak!", + "Tor mode active, every connection using Onion route.": "Tor mód aktív, minden kapcsolat az Onion hálózaton keresztül történik.", + "Successfully started Tor onion hidden services.": "Sikeresen elindultak a Tor onion titkos szolgáltatások.", + "Unable to start hidden services, please check your config.": "Nem sikerült elindítani a Tor onion szolgáltatásokat. Kérjük, ellenőrizd a beállításokat!", + "For faster connections open {0} port on your router.": "A gyorsabb kapcsolatok érdekében nyisd ki a {0} portot a routereden.", + "Your connection is restricted. Please, open {0} port on your router": "A kapcsolatod korlátozott. Kérjük, nyisd ki a {0} portot a routereden", + "or configure Tor to become a full member of the ZeroNet network.": "vagy állítsd be a Tor kliensed, hogy teljes értékű tagja legyél a hálózatnak!", + + "Select account you want to use in this site:": "Válaszd ki az oldalhoz használt felhasználónevet:", + "currently selected": "jelenleg kijelölt", + "Unique to site": "Egyedi az oldalon", + + "Content signing failed": "Tartalom aláírása sikertelen", + "Content publish queued for {0:.0f} seconds.": "Tartalom publikálása elhalasztva {0:.0f} másodperccel.", + "Content published to {0} peers.": "Tartalom publikálva {0} fél részére.", + "No peers found, but your content is ready to access.": "Aktív csatlakozási pont nem található, de a tartalmad készen áll a kiszolgálásra.", + "Your network connection is restricted. Please, open {0} port": "A kapcsolatod korlátozott. Kérjük, nyisd ki a {0} portot", + "on your router to make your site accessible for everyone.": "a routereden, hogy az oldalad mindenki számára elérhető legyen.", + "Content publish failed.": "Sikertelen tartalom publikálás.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ez a fájl még letöltés alatt van, ha most felülírod a korábbi tartalma elveszhet.", + "Write content anyway": "Felülírás", + "New certificate added:": "Új tanúsítvány hozzáadva:", + "You current certificate:": "A jelenlegi tanúsítványod: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Változtatás {auth_type}/{auth_user_name}@{domain}-ra", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "A tanúsítvány megváltozott {auth_type}/{auth_user_name}@{domain}-ra.", + "Site cloned": "Az oldal klónozva", + + "You have successfully changed the web interface's language!": "Sikeresen átállítottad a web felület nyelvét!", + "Due to the browser's caching, the full transformation could take some minute.": "A böngésző cache-elése miatt egy pár percig eltarthat a teljes átállás.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Az UiServer Websocket kapcsolat megszakadt. 
Újracsatlakozás...", + "Connection with UiServer Websocket recovered.": "Az UiServer Websocket kapcsolat visszaállt.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket hiba, töltsd újra az oldalt!", + "   Connecting...": "   Csatlakozás...", + "Site size: ": "Oldal mérete: ", + "MB is larger than default allowed ": "MB nagyobb, mint az engedélyezett ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Az oldal megnyitása és a korlát módosítása \" + site_info.next_size_limit + \"MB-ra", + " files needs to be downloaded": " fájlt kell letölteni", + " downloaded": " letöltve", + " download failed": " letöltés sikertelen", + "Peers found: ": "Talált csatlakozási pontok: ", + "No peers found": "Nincs csatlakozási pont", + "Running out of size limit (": "Az oldal hamarosan eléri a méretkorlátot (", + "Set limit to \" + site_info.next_size_limit + \"MB": "A korlát módosítása \" + site_info.next_size_limit + \"MB-ra", + "Site size limit changed to {0}MB": "A méretkorlát módosítva {0}MB-ra", + " New version of this page has just released.
Reload to see the modified content.": "Az oldal épp most módosult
A megváltozott tartalomért töltsd újra!", + "This site requests permission:": "Az oldal megtekintéséhez szükséges jog:", + "_(Accept)": "Engedélyezés" + +} diff --git a/src/Translate/languages/it.json b/src/Translate/languages/it.json new file mode 100644 index 00000000..47992328 --- /dev/null +++ b/src/Translate/languages/it.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Congratulazioni, la tua porta {0} è aperta.
Ora sei un membro effettivo della rete ZeroNet!", + "Tor mode active, every connection using Onion route.": "Modalità Tor attiva, ogni connessione sta usando la rete Onion.", + "Successfully started Tor onion hidden services.": "Servizi Tor onion nascosti avviati con successo.", + "Unable to start hidden services, please check your config.": "Impossibile avviare i servizi nascosti. Si prega di controllare la propria configurazione!", + "For faster connections open {0} port on your router.": "Per avere connessioni più veloci aprire la porta {0} sul router.", + "Your connection is restricted. Please, open {0} port on your router": "La tua connessione è limitata. Aprire la porta {0} sul router", + "or configure Tor to become a full member of the ZeroNet network.": "o configurare Tor per diventare membro effettivo della rete ZeroNet!", + + "Select account you want to use in this site:": "Seleziona l'account che vuoi utilizzare per questo sito:", + "currently selected": "attualmente selezionato", + "Unique to site": "Unico sul sito", + + "Content signing failed": "Firma contenuti fallita", + "Content publish queued for {0:.0f} seconds.": "Pubblicazione contenuti in coda per {0:.0f} secondi.", + "Content published to {0} peers.": "Contenuti pubblicati su {0} peer.", + "No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per l'accesso.", + "Your network connection is restricted. Please, open {0} port": "La tua connessione di rete è limitata. Aprire la porta {0} ", + "on your router to make your site accessible for everyone.": "sul router, per rendere il sito accessibile a chiunque.", + "Content publish failed.": "Pubblicazione contenuti fallita.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se viene modificato i contenuti precedenti andranno persi.", + "Write content anyway": "Scrivere comunque i contenuti", + "New certificate added:": "Aggiunto nuovo certificato:", + "You current certificate:": "Il tuo attuale certificato:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambiarlo in {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificato cambiato in: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Sito clonato", + + "You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua dell'interfaccia web!", + "Due to the browser's caching, the full transformation could take some minute.": "La trasformazione completa potrebbe richiedere alcuni minuti a causa della cache del browser.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "La connessione con UiServer Websocket è andata persa. 
Riconnessione...", + "Connection with UiServer Websocket recovered.": "Connessione con UiServer Websocket recuperata.", + "UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, ricaricare la pagina!", + "   Connecting...": "   Connessione...", + "Site size: ": "Dimensione del sito: ", + "MB is larger than default allowed ": "MB è più grande del valore predefinito consentito ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Aprire il sito e impostare la dimensione limite a \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " i file devono essere scaricati", + " downloaded": " scaricati", + " download failed": " scaricamento fallito", + "Peers found: ": "Peer trovati: ", + "No peers found": "Nessun peer trovato", + "Running out of size limit (": "Superato il limite di spazio (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Limite di spazio cambiato a {0}MB", + " New version of this page has just released.
Reload to see the modified content.": "E' stata rilasciata una nuova versione di questa pagina
Ricaricare per vedere il contenuto modificato!", + "This site requests permission:": "Questo sito richiede permessi:", + "_(Accept)": "Concedere" + +} diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json new file mode 100644 index 00000000..ff10aee4 --- /dev/null +++ b/src/Translate/languages/jp.json @@ -0,0 +1,66 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "おめでとうございます。ポート {0} が開きました。これでZeroNetネットワークのメンバーです。", + "Tor mode active, every connection using Onion route.": "Torモードがアクティブです、全ての接続はOnionルートを使用します。", + "Successfully started Tor onion hidden services.": "Tor onionサービスを正常に開始しました。", + "Unable to start hidden services, please check your config.": "非表示のサービスを開始できません。設定を確認してください。", + "For faster connections open {0} port on your router.": "接続を高速化するにはルーターのポート {0} を開けてください。", + "Your connection is restricted. Please, open {0} port on your router": "接続が制限されています。ルーターのポート {0} を開けてください。", + "or configure Tor to become a full member of the ZeroNet network.": "または、TorをZeroNetネットワークのメンバーになるように設定してください。", + + "Select account you want to use in this site:": "このサイトで使用するアカウントを選択:", + "No certificate": "証明書がありません", + "currently selected": "現在選択中", + "Unique to site": "サイト固有", + + "Content signing failed": "コンテンツの署名に失敗", + "Content publish queued for {0:.0f} seconds.": "コンテンツの公開は{0:.0f}秒のキューに入れられました。", + "Content published to {0}/{1} peers.": "サイトの更新を通知済 {0}/{1} ピア", + "Content published to {0} peers.": "{0}ピアに公開されたコンテンツ。", + "No peers found, but your content is ready to access.": "ピアは見つかりませんでしたが、コンテンツにアクセスする準備ができました。", + "Your network connection is restricted. Please, open {0} port": "ネットワーク接続が制限されています。ポート {0} を開いて、", + "on your router to make your site accessible for everyone.": "誰でもサイトにアクセスできるようにしてください。", + "Content publish failed.": "コンテンツの公開に失敗しました。", + "This file still in sync, if you write it now, then the previous content may be lost.": "このファイルはまだ同期しています。今すぐ書き込むと、前のコンテンツが失われる可能性があります。", + "Write content anyway": "とにかくコンテンツを書く", + "New certificate added:": "新しい証明書が追加されました:", + "You current certificate:": "現在の証明書:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} に変更", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "変更後の証明書: {auth_type}/{auth_user_name}@{domain}", + "Site cloned": "複製されたサイト", + + "You have successfully changed the web interface's language!": "Webインターフェースの言語が正常に変更されました!", + "Due to the browser's caching, the full transformation could take some minute.": "ブラウザのキャッシュにより、完全な変換には数分かかる場合があります。", + + "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocketとの接続が失われました。再接続しています...", + "Connection with UiServer Websocket recovered.": "UiServer Websocketとの接続が回復しました。", + "UiServer Websocket error, please reload the page.": "UiServer Websocketエラー、ページをリロードしてください。", + "   Connecting...": "   接続しています...", + "Site size: ": "サイトサイズ: ", + "MB is larger than default allowed ": "MBはデフォルトの許容値よりも大きいです。 ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "サイトを開き、サイズ制限を \" + site_info.next_size_limit + \"MB に設定", + " files needs to be downloaded": " ファイルをダウンロードする必要があります", + " downloaded": " ダウンロード", + " download failed": " ダウンロード失敗", + "Peers found: ": "ピアが見つかりました: ", + "No peers found": "ピアが見つかりません", + "Running out of size limit (": "サイズ制限を使い果たしました (", + "Set limit to \" + site_info.next_size_limit + \"MB": "制限を \" + site_info.next_size_limit + \"MB に設定", + "Cloning site...": "サイトを複製中…", + "Site size limit changed to {0}MB": "サイトのサイズ制限が {0}MB に変更されました", + " New version of this page has just released.
Reload to see the modified content.": " このページの新しいバージョンが公開されました。
変更されたコンテンツを見るには再読み込みしてください。", + "This site requests permission:": "このサイトは権限を要求しています:", + "_(Accept)": "_(許可)", + + "Save": "保存", + "Trackers announcing": "トラッカーをお知らせ", + "Error": "エラー", + "Done": "完了", + "Tracker connection error detected.": "トラッカー接続エラーが検出されました。", + + "Update ZeroNet client to latest version?": "ZeroNetクライアントを最新版に更新しますか?", + "Update": "更新", + "Restart ZeroNet client?": "ZeroNetクライアントを再起動しますか?", + "Restart": "再起動", + "Shut down ZeroNet client?": "ZeroNetクライアントを終了しますか?", + "Shut down": "終了" +} diff --git a/src/Translate/languages/nl.json b/src/Translate/languages/nl.json new file mode 100644 index 00000000..985cce7a --- /dev/null +++ b/src/Translate/languages/nl.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Gefeliciteerd, je poort {0} is geopend.
Je bent een volledig lid van het ZeroNet netwerk!", + "Tor mode active, every connection using Onion route.": "Tor modus actief, elke verbinding gebruikt een Onion route.", + "Successfully started Tor onion hidden services.": "Tor onion verborgen diensten zijn met succes gestart.", + "Unable to start hidden services, please check your config.": "Het was niet mogelijk om verborgen diensten te starten, controleer je configuratie.", + "For faster connections open {0} port on your router.": "Voor snellere verbindingen open je de poort {0} op je router.", + "Your connection is restricted. Please, open {0} port on your router": "Je verbinding is beperkt. Open alsjeblieft poort {0} op je router", + "or configure Tor to become a full member of the ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.", + + "Select account you want to use in this site:": "Selecteer het account dat je wilt gebruiken binnen deze site:", + "currently selected": "huidige selectie", + "Unique to site": "Uniek voor deze site", + + "Content signing failed": "Inhoud ondertekenen mislukt", + "Content publish queued for {0:.0f} seconds.": "Publiceren van inhoud staat in de wachtrij voor {0:.0f} seconden.", + "Content published to {0} peers.": "Inhoud is gepubliceerd naar {0} peers.", + "No peers found, but your content is ready to access.": "Geen peers gevonden, maar je inhoud is klaar voor toegang.", + "Your network connection is restricted. Please, open {0} port": "Je netwerkverbinding is beperkt. Open alsjeblieft poort {0}", + "on your router to make your site accessible for everyone.": "op je router om je site toegankelijk te maken voor iedereen.", + "Content publish failed.": "Inhoud publicatie mislukt.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Dit bestand is nog in sync, als je het nu overschrijft, dan is mogelijk de vorige inhoud verloren.", + "Write content anyway": "Inhoud toch schrijven", + "New certificate added:": "Nieuw certificaat toegevoegd:", + "You current certificate:": "Je huidige certificaat:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Verander het naar {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificaat veranderd naar: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Site gecloned", + + "You have successfully changed the web interface's language!": "Je hebt met succes de taal van de web interface aangepast!", + "Due to the browser's caching, the full transformation could take some minute.": "Door caching van je browser kan de volledige transformatie enkele minuten duren.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Verbinding met UiServer Websocket verbroken. 
Opnieuw verbinden...", + "Connection with UiServer Websocket recovered.": "Verbinding met UiServer Websocket hersteld.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket fout, herlaad alsjeblieft de pagina.", + "   Connecting...": "   Verbinden...", + "Site size: ": "Site grootte ", + "MB is larger than default allowed ": "MB is groter dan de standaard toegestaan ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Open de site en stel de limeit op de grootte in op \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " bestanden moeten worden gedownload", + " downloaded": " gedownload", + " download failed": " download mislukt", + "Peers found: ": "Peers gevonden: ", + "No peers found": "Geen peers gevonden", + "Running out of size limit (": "Limeit op grootte bereikt (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Stel limiet in op \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Site limiet op grootte is veranderd naar {0}MB", + " New version of this page has just released.
Reload to see the modified content.": " Een nieuwe versie van deze pagina is zojuist uitgekomen.
Herlaad de pagina om de bijgewerkte inhoud te zien.", + "This site requests permission:": "Deze site vraagt om permissie:", + "_(Accept)": "Toekennen" + +} diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json new file mode 100644 index 00000000..679e909d --- /dev/null +++ b/src/Translate/languages/pl.json @@ -0,0 +1,54 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Gratulacje, twój port {0} jest otwarty.
Jesteś pełnoprawnym użytkownikiem sieci ZeroNet!", + "Tor mode active, every connection using Onion route.": "Tryb Tor aktywny, każde połączenie przy użyciu trasy Cebulowej.", + "Successfully started Tor onion hidden services.": "Pomyślnie zainicjowano ukryte usługi cebulowe Tor.", + "Unable to start hidden services, please check your config.": "Nie udało się uruchomić ukrytych usług, proszę sprawdź swoją konfigurację.", + "For faster connections open {0} port on your router.": "Dla szybszego połączenia otwórz {0} port w swoim routerze.", + "Your connection is restricted. Please, open {0} port on your router": "Połączenie jest ograniczone. Proszę, otwórz port {0} w swoim routerze", + "or configure Tor to become a full member of the ZeroNet network.": "bądź skonfiguruj Tora by stać się pełnoprawnym użytkownikiem sieci ZeroNet.", + + "Select account you want to use in this site:": "Wybierz konto, którego chcesz użyć na tej stronie:", + "currently selected": "aktualnie wybrany", + "Unique to site": "Unikatowy dla strony", + + "Content signing failed": "Podpisanie treści zawiodło", + "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).", + "Content published to {0} peers.": "Treść opublikowana do {0} użytkowników.", + "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników, ale twoja treść jest dostępna.", + "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. Proszę, otwórz port {0}", + "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogła być dostępna dla wszystkich.", + "Content publish failed.": "Publikacja treści zawiodła.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ten plik wciąż się synchronizuje, jeśli zapiszesz go teraz, poprzednia treść może zostać utracona.", + "Write content anyway": "Zapisz treść mimo wszystko", + "New certificate added:": "Nowy certyfikat dodany:", + "You current certificate:": "Twój aktualny certyfikat: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Zmień na {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certyfikat zmieniony na: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Strona sklonowana", + + "You have successfully changed the web interface's language!": "Pomyślnie zmieniono język interfejsu stron!", + "Due to the browser's caching, the full transformation could take some minute.": "Ze względu na buforowanie przeglądarki, pełna zmiana może zająć parę minut.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Połączenie z UiServer Websocket zostało przerwane. 
Ponowne łączenie...", + "Connection with UiServer Websocket recovered.": "Połączenie z UiServer Websocket przywrócone.", + "UiServer Websocket error, please reload the page.": "Błąd UiServer Websocket, prosze odświeżyć stronę.", + "   Connecting...": "   Łączenie...", + "Site size: ": "Rozmiar strony: ", + "MB is larger than default allowed ": "MB jest większy niż domyślnie dozwolony ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otwórz stronę i ustaw limit na \" + site_info.next_size_limit + \"MBów", + " files needs to be downloaded": " pliki muszą zostać ściągnięte", + " downloaded": " ściągnięte", + " download failed": " ściąganie nie powiodło się", + "Peers found: ": "Odnaleziono użytkowników: ", + "No peers found": "Nie odnaleziono użytkowników", + "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów", + "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów", + " New version of this page has just released.
Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.
Odśwież by zobaczyć nową, zmodyfikowaną treść strony.", + "This site requests permission:": "Ta strona wymaga uprawnień:", + "_(Accept)": "Przyznaj uprawnienia", + + "Sign and publish": "Podpisz i opublikuj", + "Restart ZeroNet client?": "Uruchomić ponownie klienta ZeroNet?", + "Restart": "Uruchom ponownie" +} diff --git a/src/Translate/languages/pt-br.json b/src/Translate/languages/pt-br.json new file mode 100644 index 00000000..a842684f --- /dev/null +++ b/src/Translate/languages/pt-br.json @@ -0,0 +1,57 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Parabéns, a porta {0} está aberta.
Você é um membro completo da rede ZeroNet!", + "Tor mode active, every connection using Onion route.": "Modo Tor ativado, todas as conexões usam a rota Onion.", + "Successfully started Tor onion hidden services.": "Os serviços ocultos Tor onion foram iniciados com sucesso.", + "Unable to start hidden services, please check your config.": "Não foi possível iniciar os serviços ocultos, por favor verifique suas configurações.", + "For faster connections open {0} port on your router.": "Para conexões mais rápidas, abra a porta {0} em seu roteador.", + "Your connection is restricted. Please, open {0} port on your router": "Sua conexão está restrita. Por favor, abra a porta {0} em seu roteador", + "or configure Tor to become a full member of the ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.", + + "Select account you want to use in this site:": "Selecione a conta que deseja usar nesse site:", + "currently selected": "atualmente selecionada", + "Unique to site": "Única para o site", + + "Content signing failed": "Assinatura de conteúdo falhou", + "Content publish queued for {0:.0f} seconds.": "Publicação de conteúdo na fila por {0:.0f} segundos.", + "Content published to {0} peers.": "Conteúdo publicado para {0} peers.", + "No peers found, but your content is ready to access.": "Nenhum peer encontrado, mas seu conteúdo está pronto para ser acessado.", + "Your network connection is restricted. Please, open {0} port": "Sua conexão de rede está restrita. Por favor, abra a porta {0}", + "on your router to make your site accessible for everyone.": "em seu roteador para tornar seu site acessível para todos.", + "Content publish failed.": "Publicação de conteúdo falhou.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Esse arquivo ainda está sincronizado, se escrevê-lo agora o conteúdo anterior poderá ser perdido.", + "Write content anyway": "Escrever o conteúdo mesmo assim", + "New certificate added:": "Novo certificado adicionado:", + "You current certificate:": "Seu certificado atual:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Alterar para {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificado alterado para: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Site clonado", + + "You have successfully changed the web interface's language!": "Você alterou o idioma da interface web com sucesso!", + "Due to the browser's caching, the full transformation could take some minute.": "Devido ao cache do navegador, a transformação completa pode levar alguns minutos.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "A conexão com UiServer Websocket foi perdida. 
Reconectando...", + "Connection with UiServer Websocket recovered.": "Conexão com UiServer Websocket recuperada.", + "UiServer Websocket error, please reload the page.": "Erro de UiServer Websocket, por favor atualize a página.", + "   Connecting...": "   Conectando...", + "Site size: ": "Tamanho do site: ", + "MB is larger than default allowed ": "MB é maior do que o tamanho permitido por padrão", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abrir site e definir limite de tamanho para \" + site_info.next_size_limit + \"MBs", + " files needs to be downloaded": " os arquivos precisam ser baixados", + " downloaded": " baixados", + " download failed": " falha no download", + "Peers found: ": "Peers encontrados: ", + "No peers found": "Nenhum peer encontrado", + "Running out of size limit (": "Passando do tamanho limite (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Definir limite para \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Limite de tamanho do site alterado para {0}MBs", + " New version of this page has just released.
Reload to see the modified content.": " Uma nova versão desse site acaba de ser publicada.
Atualize para ver o conteúdo modificado.", + "This site requests permission:": "Esse site solicita permissão:", + "_(Accept)": "Conceder", + + "Save": "Salvar", + "Trackers announcing": "Trackers anunciando", + "Error": "Erro", + "Done": "Concluído", + "Tracker connection error detected.": "Erro de conexão com tracker foi detectado." + +} diff --git a/src/Translate/languages/ru.json b/src/Translate/languages/ru.json new file mode 100644 index 00000000..96c84b91 --- /dev/null +++ b/src/Translate/languages/ru.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Поздравляем, ваш порт {0} открыт.
Вы полноценный участник сети ZeroNet!", + "Tor mode active, every connection using Onion route.": "Режим Tor включен, все соединения осуществляются через Tor.", + "Successfully started Tor onion hidden services.": "Скрытый сервис Tor запущен успешно.", + "Unable to start hidden services, please check your config.": "Ошибка при запуске скрытого сервиса, пожалуйста, проверьте настройки.", + "For faster connections open {0} port on your router.": "Для более быстрой работы сети откройте {0} порт на вашем роутере.", + "Your connection is restricted. Please, open {0} port on your router": "Подключение ограничено. Пожалуйста, откройте {0} порт на вашем роутере", + "or configure Tor to become a full member of the ZeroNet network.": "или настройте Tor, чтобы стать полноценным участником сети ZeroNet.", + + "Select account you want to use in this site:": "Выберите аккаунт для использования на этом сайте:", + "currently selected": "сейчас выбран", + "Unique to site": "Уникальный для этого сайта", + + "Content signing failed": "Подпись контента не удалась", + "Content publish queued for {0:.0f} seconds.": "Публикация контента поставлена в очередь на {0:.0f} секунд.", + "Content published to {0} peers.": "Контент опубликован на {0} пирах.", + "No peers found, but your content is ready to access.": "Пиры не найдены, но ваш контент доступен.", + "Your network connection is restricted. Please, open {0} port": "Ваше подключение ограничено. Пожалуйста, откройте {0} порт", + "on your router to make your site accessible for everyone.": "на вашем роутере, чтобы ваш сайт стал доступен посетителям.", + "Content publish failed.": "Ошибка при публикации контента.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Этот файл всё еще синхронизируется, если продолжить его изменение, предыдущий контент может быть потерян.", + "Write content anyway": "Записать контент в любом случае", + "New certificate added:": "Добавлен новый сертификат:", + "You current certificate:": "Ваш текущий сертификат: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Изменить его на {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Сертификат изменен на: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Сайт склонирован", + + "You have successfully changed the web interface's language!": "Язык интерфейса успешно изменен!", + "Due to the browser's caching, the full transformation could take some minute.": "Из-за кэширования браузера полное преобразование может занять пару минут.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Подключение к UiServer Websocket прервано. 
Переподключаюсь...", + "Connection with UiServer Websocket recovered.": "Подключение к UiServer Websocket восстановлено.", + "UiServer Websocket error, please reload the page.": "Ошибка UiServer Websocket, перезагрузите страницу!", + "   Connecting...": "   Подключение...", + "Site size: ": "Размер сайта: ", + "MB is larger than default allowed ": "MB больше чем разрешено по умолчанию ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Открыть сайт и установить лимит занимаемого места на \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " файлы должны быть загружены", + " downloaded": " загружено", + " download failed": " ошибка загрузки", + "Peers found: ": "Пиров найдено: ", + "No peers found": "Пиры не найдены", + "Running out of size limit (": "Доступное место закончилось (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Установить лимит на \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Лимит памяти на диске изменен на {0}MB", + " New version of this page has just released.
Reload to see the modified content.": "Доступна новая версия данной страницы
Обновите страницу, чтобы увидеть изменения!", + "This site requests permission:": "Данный сайт запрашивает разрешения:", + "_(Accept)": "Предоставить" + +} diff --git a/src/Translate/languages/sk.json b/src/Translate/languages/sk.json new file mode 100644 index 00000000..8fb4554b --- /dev/null +++ b/src/Translate/languages/sk.json @@ -0,0 +1,57 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Blahoželáme, váš port {0} je otvorený.
Ste úplným členom siete ZeroNet!", + "Tor mode active, every connection using Onion route.": "Tor mód aktívny, všetky spojenia teraz používajú Onion sieť.", + "Successfully started Tor onion hidden services.": "Tor úspešne spustený.", + "Unable to start hidden services, please check your config.": "Nebolo možné spustiť Tor, prosím skontrolujte nastavenia.", + "For faster connections open {0} port on your router.": "Pre rýchlejšie spojenie otvorte na vašom routeri port {0}.", + "Your connection is restricted. Please, open {0} port on your router": "Vaše pripojenie je obmedzené. Prosím otvorte port {0} na vašom routeri", + "or configure Tor to become a full member of the ZeroNet network.": "alebo nastavte Tor, aby ste sa stali plným členom siete ZeroNet.", + + "Select account you want to use in this site:": "Zvoľte účet, ktorý chcete používať na tejto stránke:", + "currently selected": "aktuálne zvolené", + "Unique to site": "Unikátny pre stránku", + + "Content signing failed": "Podpísanie obsahu zlyhalo", + "Content publish queued for {0:.0f} seconds.": "Podpísanie obsahu bude na rade za {0:.0f} sekúnd.", + "Content published to {0} peers.": "Obsah publikovaný {0} peer-erom.", + "No peers found, but your content is ready to access.": "Neboli nájdené žiadne peer-ery, ale váš obsah je pripravený pre prístup.", + "Your network connection is restricted. Please, open {0} port": "Vaše pripojenie k sieti je obmedzené. Prosím otvorte port {0}", + "on your router to make your site accessible for everyone.": "na vašom routeri, aby bola vaša stránka prístupná pre všetkých.", + "Content publish failed.": "Publikovanie obsahu zlyhalo.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Tento súbor sa stále synchronizuje, ak v ňom spravíte zmeny, predchádzajúci obsah sa môže stratiť.", + "Write content anyway": "Aj tak spraviť zmeny", + "New certificate added:": "Pridaný nový certifikát:", + "You current certificate:": "Váš aktuálny certifikát:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Zmeniť na {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certifikát zmenený na: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Stránka naklonovaná", + + "You have successfully changed the web interface's language!": "Úspešne ste zmenili jazyk webového rozhrania!", + "Due to the browser's caching, the full transformation could take some minute.": "Kvôli cache webového prehliadača môže celková transformácia chvíľu trvať.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Spojenie s UiServer Websocket bolo stratené. 
Znovu pripájame...", + "Connection with UiServer Websocket recovered.": "Spojenie s UiServer Websocket obnovené.", + "UiServer Websocket error, please reload the page.": "Chyba UiServer Websocket-u, prosím znovu načítajte stránku.", + "   Connecting...": "   Pripájanie...", + "Site size: ": "Veľkosť stránky: ", + "MB is larger than default allowed ": "MB je viac ako povolená hodnota", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otvoriť stránku a nastaviť limit veľkosti na \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " súbory je potrebné stiahnuť", + " downloaded": " stiahnuté", + " download failed": " sťahovanie zlyhalo", + "Peers found: ": "Peer-erov nájdených: ", + "No peers found": "Neboli nájdený žiadny peer-ery", + "Running out of size limit (": "Presahuje povolený limit veľkosti pamäte (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Nastaviť limit na \" + site_info.next_size_limit + \"MB ändern", + "Site size limit changed to {0}MB": "Limit veľkosti pamäte nastavený na {0}MB", + " New version of this page has just released.
Reload to see the modified content.": " Bola vydaná nová verzia tejto stránky.
Znovu načítajte túto stránku, aby bolo vidieť zmeny.", + "This site requests permission:": "Táto stránka vyžaduje povolenie:", + "_(Accept)": "Udeliť", + + "on": "", + "Oct": "Okt", + "May": "Máj", + "Jun": "Jún", + "Jul": "Júl" + +} diff --git a/src/Translate/languages/sl.json b/src/Translate/languages/sl.json new file mode 100644 index 00000000..2aeb628e --- /dev/null +++ b/src/Translate/languages/sl.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Čestitke, vaša vrata {0} so odprta.
Postali ste polnopravni član ZeroNet omrežja!", + "Tor mode active, every connection using Onion route.": "Način Tor aktiven.", + "Successfully started Tor onion hidden services.": "Storitve Tor uspešno zagnane.", + "Unable to start hidden services, please check your config.": "Ni bilo mogoče zagnati Tor storitev. Preverite nastavitve.", + "For faster connections open {0} port on your router.": "Za hitrejše povezave na svojem usmerjevalniku odprite vrata {0}.", + "Your connection is restricted. Please, open {0} port on your router": "Vaša povezava je omejena. Na svojem usmerjevalniku odprite vrata {0}", + "or configure Tor to become a full member of the ZeroNet network.": "ali nastavite Tor, da postanete polnopravni član ZeroNet omrežja.", + + "Select account you want to use in this site:": "Izberite račun, ki ga želite uporabiti na tem spletnem mestu:", + "currently selected": "trenutno izbrano", + "Unique to site": "Edinstven za spletno mesto", + + "Content signing failed": "Podpisovanje vsebine ni uspelo", + "Content publish queued for {0:.0f} seconds.": "Objava vsebine na čakanju za {0:.0f} sekund.", + "Content published to {0} peers.": "Vsebina objavljena na {0} povezavah.", + "No peers found, but your content is ready to access.": "Ni nobenih povezav, vendar je vaša vsebina pripravljena za dostop.", + "Your network connection is restricted. Please, open {0} port": "Vaša povezava je omejena. Prosimo, odprite vrata {0}", + "on your router to make your site accessible for everyone.": "na vašem usmerjevalniku, da bo vaše spletno mesto dostopno za vse.", + "Content publish failed.": "Objavljanje vsebine ni uspelo.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ta datoteka se še vedno sinhronizira. Če jo uredite zdaj, se lahko zgodi, da bo prejšnja vsebina izgubljena.", + "Write content anyway": "Vseeno uredi vsebino", + "New certificate added:": "Dodano novo potrdilo:", + "You current certificate:": "Trenutno potrdilo:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Spremenite ga na {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Potrdilo spremenjeno na: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Stran klonirana", + + "You have successfully changed the web interface's language!": "Uspešno ste spremenili jezik spletnega vmesnika!", + "Due to the browser's caching, the full transformation could take some minute.": "Zaradi predpomnjenja brskalnika lahko popolna preobrazba traja nekaj minut.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Povezava z UiServer Websocket je bila izgubljena. Ponovno povezovanje ...", + "Connection with UiServer Websocket recovered.": "Povezava z UiServer Websocket je vzpostavljena.", + "UiServer Websocket error, please reload the page.": "Napaka UiServer Websocket. 
Prosimo osvežite stran.", + "   Connecting...": "   Povezovanje ...", + "Site size: ": "Velikost strani: ", + "MB is larger than default allowed ": "MB je večja od dovoljenih", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Odpri to stran in nastavi omejitev na \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " datotek mora biti prenešenih", + " downloaded": " preneseno", + " download failed": " prenos ni uspel", + "Peers found: ": "Najdene povezave: ", + "No peers found": "Ni najdenih povezav", + "Running out of size limit (": "Zmanjkuje dovoljenega prostora (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Nastavi omejitev na \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Omejitev strani nastavljena na{0} MB", + " New version of this page has just released.
Reload to see the modified content.": " Ravnokar je bila objavljena nova različica te strani.
Osvežite jo, da boste videli novo vsebino.", + "This site requests permission:": "Ta stran zahteva dovoljenja:", + "_(Accept)": "Dovoli" + +} diff --git a/src/Translate/languages/tr.json b/src/Translate/languages/tr.json new file mode 100644 index 00000000..09a1bdb5 --- /dev/null +++ b/src/Translate/languages/tr.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "Tebrikler, portunuz ({0}) açık.
Artık ZeroNet ağına katıldınız!", + "Tor mode active, every connection using Onion route.": "Tor aktif, tüm bağlantılar Onion yönlendiricisini kullanıyor.", + "Successfully started Tor onion hidden services.": "Gizli Tor hizmetleri başlatıldı.", + "Unable to start hidden services, please check your config.": "Gizli hizmetler başlatılamadı, lütfen ayarlarınızı kontrol ediniz.", + "For faster connections open {0} port on your router.": "Daha hızlı bağlantı için {0} nolu portu bilgisayarınıza yönlendirin.", + "Your connection is restricted. Please, open {0} port on your router": "Sınırlı bağlantı. Lütfen, {0} nolu portu bilgisayarınıza yönlendirin", + "or configure Tor to become a full member of the ZeroNet network.": "ya da ZeroNet ağına tam olarak katılabilmek için Tor'u kullanın.", + + "Select account you want to use in this site:": "Bu sitede kullanmak için bir hesap seçiniz:", + "currently selected": "kullanılan", + "Unique to site": "Bu site için benzersiz", + + "Content signing failed": "İçerik imzalama başarısız oldu", + "Content publish queued for {0:.0f} seconds.": "İçerik yayımlanmak üzere {0:.0f} saniyedir kuyrukta.", + "Content published to {0} peers.": "İçerik {0} eşe dağıtıldı.", + "No peers found, but your content is ready to access.": "Eş bulunamadı, ama içeriğiniz erişime hazır.", + "Your network connection is restricted. Please, open {0} port": "Sınırlı bağlantı. Lütfen, {0} nolu portu bilgisayarınıza yönlendirin", + "on your router to make your site accessible for everyone.": "böylece sitenizi herkes için erişilebilir yapabilirsiniz.", + "Content publish failed.": "İçerik yayımlama başarısız oldu.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Bu dosya hala güncelleniyor, eğer şimdi kaydederseniz, önceki içerik kaybolabilir.", + "Write content anyway": "Yine de kaydet", + "New certificate added:": "Yeni sertifika eklendi:", + "You current certificate:": "Kullanılan sertifikanız:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} olarak değiştir.", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "{auth_type}/{auth_user_name}@{domain} olarak değiştirildi.", + "Site cloned": "Site klonlandı", + + "You have successfully changed the web interface's language!": "Web arayüzü için dil başarıyla değiştirildi!", + "Due to the browser's caching, the full transformation could take some minute.": "Tam dönüşümün sağlanması, tarayıcı önbelleklemesi yüzünden zaman alabilir.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket ile bağlantı kesildi. 
Yeniden bağlanılıyor...", + "Connection with UiServer Websocket recovered.": "UiServer Websocket ile bağlantı yeniden kuruldu.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket hatası, lütfen sayfayı yenileyin.", + "   Connecting...": "   Bağlanıyor...", + "Site size: ": "Site boyutu: ", + "MB is larger than default allowed ": "MB izin verilenden fazla ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Siteyi açın ve boyut sınırını \" + site_info.next_size_limit + \"MB'ye yükseltin", + " files needs to be downloaded": " indirilmesi gereken dosyalar", + " downloaded": " indirildi", + " download failed": " indirme başarısız", + "Peers found: ": "Bulunan eşler: ", + "No peers found": "Eş bulunamadı", + "Running out of size limit (": "Boyut sınırlamasını aştı (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Sınırlamayı \" + site_info.next_size_limit + \"MB'ye yükselt", + "Site size limit changed to {0}MB": "Site boyut sınırlaması {0}MB olarak ayarlandı", + " New version of this page has just released.
Reload to see the modified content.": " Bu sayfanın yeni versiyonu yayımlandı.
Değişen içeriği görmek için yeniden yükleyiniz.", + "This site requests permission:": "Bu site bir izin istiyor:", + "_(Accept)": "İzin ver" + +} diff --git a/src/Translate/languages/zh-tw.json b/src/Translate/languages/zh-tw.json new file mode 100644 index 00000000..0ec071b4 --- /dev/null +++ b/src/Translate/languages/zh-tw.json @@ -0,0 +1,54 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "祝賀,你的埠 ({0}) 已經打開。
你已經是 ZeroNet 網路的正式成員了!", + "Tor mode active, every connection using Onion route.": "Tor 模式啟用,每個連接正在使用洋蔥路由。", + "Successfully started Tor onion hidden services.": "成功啟動 Tor 洋蔥隱藏服務。", + "Unable to start hidden services, please check your config.": "無法打開隱藏服務,請檢查你的配置。", + "For faster connections open {0} port on your router.": "為了更快的連接請在路由器上打開 {0} 埠。", + "Your connection is restricted. Please, open {0} port on your router": "你的連接受限制。請在你的路由器上打開 {0} 埠", + "or configure Tor to become a full member of the ZeroNet network.": "或者配置你的 Tor 來成為 ZeroNet 的正式成員。", + + "Select account you want to use in this site:": "選擇你要在這個網站使用的帳戶:", + "currently selected": "當前選擇", + "Unique to site": "網站獨有身份", + + "Content signing failed": "內容簽署失敗", + "Content publish queued for {0:.0f} seconds.": "內容已加入 {0:.0f} 秒後的發佈隊列。", + "Content published to {0}/{1} peers.": "內容已發佈到 {0}/{1} 個節點。", + "Content published to {0} peers.": "內容已發佈到 {0} 個節點。", + "No peers found, but your content is ready to access.": "找不到節點,但是你的內容已經準備好被訪問。", + "Your network connection is restricted. Please, open {0} port": "你的網路連接受限制。請在你的路由器上打開 {0} 埠", + "on your router to make your site accessible for everyone.": "確保你的網站能被每一個人訪問。", + "Content publish failed.": "內容發佈失敗。", + "This file still in sync, if you write it now, then the previous content may be lost.": "這個檔仍然在同步中,如果你現在寫入它,之前的內容可能會被丟失。", + "Write content anyway": "強制寫入內容", + "New certificate added:": "新證書:", + "You current certificate:": "你當前的證書:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "改變至 {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "證書更改至:{auth_type}/{auth_user_name}@{domain}。", + + "You have successfully changed the web interface's language!": "你已經成功改變了 Web 界面的語言!", + "Due to the browser's caching, the full transformation could take some minute.": "由於你的瀏覽器緩存,完整的翻譯可能需要花幾分鐘。", + + "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket 的連線已丟失。重新連線中...", + "Connection with UiServer Websocket recovered.": "UiServer Websocket 的連線已恢復。", + "UiServer Websocket error, please reload the page.": "UiServer Websocket 錯誤,請重新載入頁面。", + "   Connecting...": "   連線中...", + "Site size: ": "網站大小:", + "MB is larger than default allowed ": "MB 比預設允許的值更大 ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打開網站並設定大小限制到 \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " 個檔需要下載", + " downloaded": " 已下載", + " download failed": " 下載失敗", + "Peers found: ": "已找到節點:", + "No peers found": "找不到節點", + "Running out of size limit (": "超出大小限制(", + "Set limit to \" + site_info.next_size_limit + \"MB": "設定限制到 \" + site_info.next_size_limit + \"MB", + "Cloning site...": "複製網站中...", + "Site cloned": "網站已複製", + "Site size limit changed to {0}MB": "網站大小限制已改變到 {0}MB", + " New version of this page has just released.
Reload to see the modified content.": " 本頁面的新版本已經發佈。
重新載入來查看更改後的內容。", + "This site requests permission:": "這個網站的請求許可權:", + "_(Accept)": "授權" + +} diff --git a/src/Translate/languages/zh.json b/src/Translate/languages/zh.json new file mode 100644 index 00000000..16a40b1a --- /dev/null +++ b/src/Translate/languages/zh.json @@ -0,0 +1,55 @@ +{ + "Congratulations, your port {0} is opened.
You are a full member of the ZeroNet network!": "祝贺,您的端口 ({0}) 已经打开。
您已经是 ZeroNet 网络的正式成员了!", + "Tor mode active, every connection using Onion route.": "Tor 模式启用,每个连接正在使用洋葱路由。", + "Successfully started Tor onion hidden services.": "成功启动 Tor 洋葱隐藏服务。", + "Unable to start hidden services, please check your config.": "无法打开隐藏服务,请检查您的配置。", + "For faster connections open {0} port on your router.": "为了更快的连接请在路由器上打开 {0} 端口。", + "Your connection is restricted. Please, open {0} port on your router": "您的连接受限制。请在您的路由器上打开 {0} 端口", + "or configure Tor to become a full member of the ZeroNet network.": "或者配置您的 Tor 来成为 ZeroNet 的正式成员。", + + "Select account you want to use in this site:": "选择您要在这个网站使用的帐户:", + "No certificate": "没有证书", + "currently selected": "当前选择", + "Unique to site": "网站独有身份", + + "Content signing failed": "内容签名失败", + "Content publish queued for {0:.0f} seconds.": "内容已加入 {0:.0f} 秒后的发布队列。", + "Content published to {0}/{1} peers.": "内容已发布到 {0}/{1} 个节点。", + "Content published to {0} peers.": "内容已发布到 {0} 个节点。", + "No peers found, but your content is ready to access.": "找不到节点,但是您的内容已经准备好被访问。", + "Your network connection is restricted. Please, open {0} port": "您的网络连接受限制。请在您的路由器上打开 {0} 端口", + "on your router to make your site accessible for everyone.": "确保您的站点能被每一个人访问。", + "Content publish failed.": "内容发布失败。", + "This file still in sync, if you write it now, then the previous content may be lost.": "这个文件仍然在同步中,如果您现在写入它,之前的内容可能会被丢失。", + "Write content anyway": "强制写入内容", + "New certificate added:": "新证书:", + "You current certificate:": "您当前的证书:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "更改至 {auth_type}/{auth_user_name}@{domain}-ra", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "证书更改至:{auth_type}/{auth_user_name}@{domain}。", + "Site cloned": "站点已克隆", + + "You have successfully changed the web interface's language!": "您已经成功更改了 web 界面的语言!", + "Due to the browser's caching, the full transformation could take some minute.": "由于您的浏览器缓存,完整的翻译可能需要花几分钟。", + + "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket 的连接已丢失。重新连接中...", + "Connection with UiServer Websocket recovered.": "UiServer Websocket 的连接已恢复。", + "UiServer Websocket error, please reload the page.": "UiServer Websocket 错误,请重新加载页面。", + "   Connecting...": "   连接中...", + "Site size: ": "站点大小:", + "MB is larger than default allowed ": "MB 比默认允许的值更大 ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打开站点并设置大小限制到 \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " 个文件需要下载", + " downloaded": " 已下载", + " download failed": " 下载失败", + "Peers found: ": "已找到节点:", + "No peers found": "找不到节点", + "Running out of size limit (": "超出大小限制", + "Set limit to \" + site_info.next_size_limit + \"MB": "设置限制到 \" + site_info.next_size_limit + \"MB", + "Cloning site...": "克隆站点中...", + "Site cloned": "站点已克隆", + "Site size limit changed to {0}MB": "站点大小限制已更改到 {0}MB", + " New version of this page has just released.
<br>Reload to see the modified content.": " 本页面的新版本已经发布。<br>
重新加载来查看更改后的内容。", + "This site requests permission:": "这个站点的请求权限:", + "_(Accept)": "授权" + +} diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py new file mode 100644 index 00000000..8f00efcb --- /dev/null +++ b/src/Ui/UiRequest.py @@ -0,0 +1,949 @@ +import time +import re +import os +import mimetypes +import json +import html +import urllib +import socket + +import gevent + +from Config import config +from Site import SiteManager +from User import UserManager +from Plugin import PluginManager +from Ui.UiWebsocket import UiWebsocket +from Crypt import CryptHash +from util import helper + +status_texts = { + 200: "200 OK", + 206: "206 Partial Content", + 400: "400 Bad Request", + 403: "403 Forbidden", + 404: "404 Not Found", + 500: "500 Internal Server Error", +} + +content_types = { + "asc": "application/pgp-keys", + "css": "text/css", + "gpg": "application/pgp-encrypted", + "html": "text/html", + "js": "application/javascript", + "json": "application/json", + "oga": "audio/ogg", + "ogg": "application/ogg", + "ogv": "video/ogg", + "sig": "application/pgp-signature", + "txt": "text/plain", + "webmanifest": "application/manifest+json", + "wasm": "application/wasm", + "webp": "image/webp" +} + + +class SecurityError(Exception): + pass + + +@PluginManager.acceptPlugins +class UiRequest(object): + + def __init__(self, server, get, env, start_response): + if server: + self.server = server + self.log = server.log + self.get = get # Get parameters + self.env = env # Enviroment settings + # ['CONTENT_LENGTH', 'CONTENT_TYPE', 'GATEWAY_INTERFACE', 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE', + # 'HTTP_COOKIE', 'HTTP_CACHE_CONTROL', 'HTTP_HOST', 'HTTP_HTTPS', 'HTTP_ORIGIN', 'HTTP_PROXY_CONNECTION', 'HTTP_REFERER', + # 'HTTP_USER_AGENT', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_ADDR', 'REMOTE_PORT', 'REQUEST_METHOD', 'SCRIPT_NAME', + # 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'SERVER_SOFTWARE', 'werkzeug.request', 'wsgi.errors', + # 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version'] + + self.start_response = start_response # Start response function + self.user = None + self.script_nonce = None # Nonce for script tags in wrapper html + + def learnHost(self, host): + self.server.allowed_hosts.add(host) + self.server.log.info("Added %s as allowed host" % host) + + def isHostAllowed(self, host): + if host in self.server.allowed_hosts: + return True + + # Allow any IP address as they are not affected by DNS rebinding + # attacks + if helper.isIp(host): + self.learnHost(host) + return True + + if ":" in host and helper.isIp(host.rsplit(":", 1)[0]): # Test without port + self.learnHost(host) + return True + + if self.isProxyRequest(): # Support for chrome extension proxy + if self.isDomain(host): + return True + else: + return False + + return False + + def isDomain(self, address): + return self.server.site_manager.isDomainCached(address) + + def resolveDomain(self, domain): + return self.server.site_manager.resolveDomainCached(domain) + + # Call the request handler function base on path + def route(self, path): + # Restict Ui access by ip + if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: + return self.error403(details=False) + + # Check if host allowed to do request + if not self.isHostAllowed(self.env.get("HTTP_HOST")): + ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)) + + http_get = self.env["PATH_INFO"] + if self.env["QUERY_STRING"]: + http_get += 
"?{0}".format(self.env["QUERY_STRING"]) + self_host = self.env["HTTP_HOST"].split(":")[0] + self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host)) + link = "http://{0}{1}".format(self_ip, http_get) + ret_body = """ +
<h4>Start the client with <code>--ui_host "{host}"</code> argument</h4> + <h4>or access via ip: <a href="{link}">{link}</a></h4>
+ """.format( + host=html.escape(self.env["HTTP_HOST"]), + link=html.escape(link) + ).encode("utf8") + return iter([ret_error, ret_body]) + + # Prepend .bit host for transparent proxy + if self.isDomain(self.env.get("HTTP_HOST")): + path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path) + path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension + path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access + + # Sanitize request url + path = path.replace("\\", "/") + if "../" in path or "./" in path: + return self.error403("Invalid path: %s" % path) + + if self.env["REQUEST_METHOD"] == "OPTIONS": + if "/" not in path.strip("/"): + content_type = self.getContentType("index.html") + else: + content_type = self.getContentType(path) + + extra_headers = {"Access-Control-Allow-Origin": "null"} + + self.sendHeader(content_type=content_type, extra_headers=extra_headers, noscript=True) + return "" + + if path == "/": + return self.actionIndex() + elif path in ("/favicon.ico", "/apple-touch-icon.png"): + return self.actionFile("src/Ui/media/img/%s" % path) + # Internal functions + elif "/ZeroNet-Internal/" in path: + path = re.sub(".*?/ZeroNet-Internal/", "/", path) + func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function + if func: + return func() + else: + return self.error404(path) + # Media + elif path.startswith("/uimedia/"): + return self.actionUiMedia(path) + elif "/uimedia/" in path: + # uimedia within site dir (for chrome extension) + path = re.sub(".*?/uimedia/", "/uimedia/", path) + return self.actionUiMedia(path) + # Websocket + elif path == "/Websocket": + return self.actionWebsocket() + # Debug + elif path == "/Debug" and config.debug: + return self.actionDebug() + elif path == "/Console" and config.debug: + return self.actionConsole() + # Wrapper-less static files + elif path.startswith("/raw/"): + return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True) + + elif path.startswith("/add/"): + return self.actionSiteAdd() + # Site media wrapper + else: + if self.get.get("wrapper_nonce"): + if self.get["wrapper_nonce"] in self.server.wrapper_nonces: + self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) + return self.actionSiteMedia("/media" + path) # Only serve html files with frame + else: + self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"]) + body = self.actionWrapper(path) + else: + body = self.actionWrapper(path) + if body: + return body + else: + func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function + if func: + return func() + else: + ret = self.error404(path) + return ret + + # The request is proxied by chrome extension or a transparent proxy + def isProxyRequest(self): + return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST"))) + + def isWebSocketRequest(self): + return self.env.get("HTTP_UPGRADE") == "websocket" + + def isAjaxRequest(self): + return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest" + + # Get mime by filename + def getContentType(self, file_name): + file_name = file_name.lower() + ext = file_name.rsplit(".", 1)[-1] + + if ext in content_types: + content_type = content_types[ext] + elif ext in ("ttf", "woff", "otf", "woff2", "eot", "sfnt", "collection"): + content_type = "font/%s" % ext + else: + content_type = mimetypes.guess_type(file_name)[0] 
+ + if not content_type: + content_type = "application/octet-stream" + + return content_type.lower() + + # Return: Posted variables + def getPosted(self): + if self.env['REQUEST_METHOD'] == "POST": + return dict(urllib.parse.parse_qsl( + self.env['wsgi.input'].readline().decode() + )) + else: + return {} + + # Return: Cookies based on self.env + def getCookies(self): + raw_cookies = self.env.get('HTTP_COOKIE') + if raw_cookies: + cookies = urllib.parse.parse_qsl(raw_cookies) + return {key.strip(): val for key, val in cookies} + else: + return {} + + def getCurrentUser(self): + if self.user: + return self.user # Cache + self.user = UserManager.user_manager.get() # Get user + if not self.user: + self.user = UserManager.user_manager.create() + return self.user + + def getRequestUrl(self): + if self.isProxyRequest(): + if self.env["PATH_INFO"].startswith("http://zero/"): + return self.env["PATH_INFO"] + else: # Add http://zero to direct domain access + return self.env["PATH_INFO"].replace("http://", "http://zero/", 1) + else: + return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"] + + def getReferer(self): + referer = self.env.get("HTTP_REFERER") + if referer and self.isProxyRequest() and not referer.startswith("http://zero/"): + return referer.replace("http://", "http://zero/", 1) + else: + return referer + + def isScriptNonceSupported(self): + user_agent = self.env.get("HTTP_USER_AGENT") + if "Edge/" in user_agent: + is_script_nonce_supported = False + elif "Safari/" in user_agent and "Chrome/" not in user_agent: + is_script_nonce_supported = False + else: + is_script_nonce_supported = True + return is_script_nonce_supported + + # Send response headers + def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]): + headers = {} + headers["Version"] = "HTTP/1.1" + headers["Connection"] = "Keep-Alive" + headers["Keep-Alive"] = "max=25, timeout=30" + headers["X-Frame-Options"] = "SAMEORIGIN" + if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()): + headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css + + if noscript: + headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';" + elif script_nonce and self.isScriptNonceSupported(): + headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; connect-src *; frame-src 'self' blob:".format(script_nonce) + + if allow_ajax: + headers["Access-Control-Allow-Origin"] = "null" + + if self.env["REQUEST_METHOD"] == "OPTIONS": + # Allow json access + headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range" + headers["Access-Control-Allow-Credentials"] = "true" + + # Download instead of display file types that can be dangerous + if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type): + headers["Content-Disposition"] = "attachment" + + cacheable_type = ( + self.env["REQUEST_METHOD"] == "OPTIONS" or + content_type.split("/", 1)[0] in ("image", "video", "font") or + content_type in ("application/javascript", "text/css") + ) + + if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"): + content_type += "; charset=utf-8" + + 
if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min + headers["Cache-Control"] = "public, max-age=600" # Cache 10 min + else: + headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all + headers["Content-Type"] = content_type + headers.update(extra_headers) + return self.start_response(status_texts[status], list(headers.items())) + + # Renders a template + def render(self, template_path, *args, **kwargs): + template = open(template_path, encoding="utf8").read() + + def renderReplacer(m): + if m.group(1) in kwargs: + return "%s" % kwargs.get(m.group(1), "") + else: + return m.group(0) + + template_rendered = re.sub("{(.*?)}", renderReplacer, template) + + return template_rendered.encode("utf8") + + def isWrapperNecessary(self, path): + match = re.match(r"/(?P
<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) + + if not match: + return True + + inner_path = match.group("inner_path").lstrip("/") + if not inner_path or path.endswith("/"): # It's a directory + content_type = self.getContentType("index.html") + else: # It's a file + content_type = self.getContentType(inner_path) + + is_html_file = "html" in content_type or "xhtml" in content_type + + return is_html_file + + @helper.encodeResponse + def formatRedirect(self, url): + return """ + <html> + <body> + Redirecting to <a href="{0}" target="_top">{0}</a> + <script> + window.top.location = "{0}" + </script> + </body> + </html> + """.format(html.escape(url)) + + # - Actions - + + # Redirect to a URL + def actionRedirect(self, url): + self.start_response('301 Redirect', [('Location', str(url))]) + yield self.formatRedirect(url) + + def actionIndex(self): + return self.actionRedirect("/" + config.homepage + "/") + + # Render a file from media with iframe site wrapper + def actionWrapper(self, path, extra_headers=None): + if not extra_headers: + extra_headers = {} + script_nonce = self.getScriptNonce() + + match = re.match(r"/(?P<address>
[A-Za-z0-9\._-]+)(?P/.*|$)", path) + just_added = False + if match: + address = match.group("address") + inner_path = match.group("inner_path").lstrip("/") + + if not self.isWrapperNecessary(path): + return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper + + if self.isAjaxRequest(): + return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper + + if self.isWebSocketRequest(): + return self.error403("WebSocket request not allowed to load wrapper") # No websocket + + if "text/html" not in self.env.get("HTTP_ACCEPT", ""): + return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", "")) + if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""): + return self.error403("Prefetch not allowed to load wrapper") + + site = SiteManager.site_manager.get(address) + + if site and site.content_manager.contents.get("content.json"): + title = site.content_manager.contents["content.json"]["title"] + else: + title = "Loading %s..." % address + site = SiteManager.site_manager.get(address) + if site: # Already added, but not downloaded + if time.time() - site.announcer.time_last_announce > 5: + site.log.debug("Reannouncing site...") + gevent.spawn(site.update, announce=True) + else: # If not added yet + site = SiteManager.site_manager.need(address) + just_added = True + + if not site: + return False + + self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) + + min_last_announce = (time.time() - site.announcer.time_last_announce) / 60 + if min_last_announce > 60 and site.isServing() and not just_added: + site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce) + gevent.spawn(site.update, announce=True) + + return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)]) + # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092) + + else: # Bad url + return False + + def getSiteUrl(self, address): + if self.isProxyRequest(): + return "http://zero/" + address + else: + return "/" + address + + def getWsServerUrl(self): + if self.isProxyRequest(): + if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 + server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] + else: # Remote client, use SERVER_NAME as server's real address + server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) + else: + server_url = "" + return server_url + + def processQueryString(self, site, query_string): + match = re.search("zeronet_peers=(.*?)(&|$)", query_string) + if match: + query_string = query_string.replace(match.group(0), "") + num_added = 0 + for peer in match.group(1).split(","): + if not re.match(".*?:[0-9]+$", peer): + continue + ip, port = peer.rsplit(":", 1) + if site.addPeer(ip, int(port), source="query_string"): + num_added += 1 + site.log.debug("%s peers added by query string" % num_added) + + return query_string + + def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None): + file_inner_path = inner_path + if not file_inner_path: + file_inner_path = "index.html" # If inner path defaults to index.html + + if file_inner_path.endswith("/"): + file_inner_path = file_inner_path + "index.html" + + address = re.sub("/.*", "", path.lstrip("/")) + if self.isProxyRequest() and (not path or "/" in path[1:]): + if 
self.env["HTTP_HOST"] == "zero": + root_url = "/" + address + "/" + file_url = "/" + address + "/" + inner_path + else: + file_url = "/" + inner_path + root_url = "/" + + else: + file_url = "/" + address + "/" + inner_path + root_url = "/" + address + "/" + + if self.isProxyRequest(): + self.server.allowed_ws_origins.add(self.env["HTTP_HOST"]) + + # Wrapper variable inits + body_style = "" + meta_tags = "" + postmessage_nonce_security = "false" + + wrapper_nonce = self.getWrapperNonce() + inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", "")) + + if "?" in inner_path: + sep = "&" + else: + sep = "?" + + if inner_query_string: + inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce) + else: + inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce) + + if self.isProxyRequest(): # Its a remote proxy request + homepage = "http://zero/" + config.homepage + else: # Use relative path + homepage = "/" + config.homepage + + server_url = self.getWsServerUrl() # Real server url for WS connections + + user = self.getCurrentUser() + if user: + theme = user.settings.get("theme", "light") + else: + theme = "light" + + themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme) + + if site.content_manager.contents.get("content.json"): # Got content.json + content = site.content_manager.contents["content.json"] + if content.get("background-color"): + background_color = content.get("background-color-%s" % theme, content["background-color"]) + body_style += "background-color: %s;" % html.escape(background_color) + if content.get("viewport"): + meta_tags += '' % html.escape(content["viewport"]) + if content.get("favicon"): + meta_tags += '' % (root_url, html.escape(content["favicon"])) + if content.get("postmessage_nonce_security"): + postmessage_nonce_security = "true" + + sandbox_permissions = "" + + if "NOSANDBOX" in site.settings["permissions"]: + sandbox_permissions += " allow-same-origin" + + if show_loadingscreen is None: + show_loadingscreen = not site.storage.isFile(file_inner_path) + + return self.render( + "src/Ui/template/wrapper.html", + server_url=server_url, + inner_path=inner_path, + file_url=re.escape(file_url), + file_inner_path=re.escape(file_inner_path), + address=site.address, + title=html.escape(title), + body_style=body_style, + meta_tags=meta_tags, + query_string=re.escape(inner_query_string), + wrapper_key=site.settings["wrapper_key"], + ajax_key=site.settings["ajax_key"], + wrapper_nonce=wrapper_nonce, + postmessage_nonce_security=postmessage_nonce_security, + permissions=json.dumps(site.settings["permissions"]), + show_loadingscreen=json.dumps(show_loadingscreen), + sandbox_permissions=sandbox_permissions, + rev=config.rev, + lang=config.language, + homepage=homepage, + themeclass=themeclass, + script_nonce=script_nonce + ) + + # Create a new wrapper nonce that allows to get one html file without the wrapper + def getWrapperNonce(self): + wrapper_nonce = CryptHash.random() + self.server.wrapper_nonces.append(wrapper_nonce) + return wrapper_nonce + + def getScriptNonce(self): + if not self.script_nonce: + self.script_nonce = CryptHash.random(encoding="base64") + + return self.script_nonce + + # Create a new wrapper nonce that allows to get one site + def getAddNonce(self): + add_nonce = CryptHash.random() + self.server.add_nonces.append(add_nonce) + return add_nonce + + def isSameOrigin(self, url_a, url_b): + if not url_a or not url_b: + return False + + url_a = url_a.replace("/raw/", "/") + url_b = 
url_b.replace("/raw/", "/") + + origin_pattern = "http[s]{0,1}://(.*?/.*?/).*" + is_origin_full = re.match(origin_pattern, url_a) + if not is_origin_full: # Origin looks trimmed to host, require only same host + origin_pattern = "http[s]{0,1}://(.*?/).*" + + origin_a = re.sub(origin_pattern, "\\1", url_a) + origin_b = re.sub(origin_pattern, "\\1", url_b) + + return origin_a == origin_b + + # Return {address: 1Site.., inner_path: /data/users.json} from url path + def parsePath(self, path): + path = path.replace("\\", "/") + path = path.replace("/index.html/", "/") # Base Backward compatibility fix + if path.endswith("/"): + path = path + "index.html" + + if "../" in path or "./" in path: + raise SecurityError("Invalid path") + + match = re.match(r"/media/(?P
[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P/.*|$)", path) + if match: + path_parts = match.groupdict() + if self.isDomain(path_parts["address"]): + path_parts["address"] = self.resolveDomain(path_parts["address"]) + path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites) + path_parts["inner_path"] = path_parts["inner_path"].lstrip("/") + if not path_parts["inner_path"]: + path_parts["inner_path"] = "index.html" + return path_parts + else: + return None + + # Serve a media for site + def actionSiteMedia(self, path, header_length=True, header_noscript=False): + try: + path_parts = self.parsePath(path) + except SecurityError as err: + return self.error403(err) + + if not path_parts: + return self.error404(path) + + address = path_parts["address"] + + file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) + + if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."): + # If debugging merge *.css to all.css and *.js to all.js + site = self.server.sites.get(address) + if site and site.settings["own"]: + from Debug import DebugMedia + DebugMedia.merge(file_path) + + if not address or address == ".": + return self.error403(path_parts["inner_path"]) + + header_allow_ajax = False + if self.get.get("ajax_key"): + site = SiteManager.site_manager.get(path_parts["request_address"]) + if self.get["ajax_key"] == site.settings["ajax_key"]: + header_allow_ajax = True + else: + return self.error403("Invalid ajax_key") + + file_size = helper.getFilesize(file_path) + + if file_size is not None: + return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) + + elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect + if path_parts["inner_path"]: + return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1]) + else: + return self.actionRedirect("./%s/" % path_parts["address"]) + + else: # File not exists, try to download + if address not in SiteManager.site_manager.sites: # Only in case if site already started downloading + return self.actionSiteAddPrompt(path) + + site = SiteManager.site_manager.need(address) + + if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites + return self.actionFile("src/Ui/media/img/favicon.ico") + + result = site.needFile(path_parts["inner_path"], priority=15) # Wait until file downloads + if result: + file_size = helper.getFilesize(file_path) + return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) + else: + self.log.debug("File not found: %s" % path_parts["inner_path"]) + return self.error404(path) + + # Serve a media for ui + def actionUiMedia(self, path): + match = re.match("/uimedia/(?P.*)", path) + if match: # Looks like a valid path + file_path = "src/Ui/media/%s" % match.group("inner_path") + allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed + if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): + # File not in allowed path + return self.error403() + else: + if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."): + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(file_path) + return self.actionFile(file_path, 
header_length=False) # Dont's send site to allow plugins append content + + else: # Bad url + return self.error400() + + def actionSiteAdd(self): + post_data = self.env["wsgi.input"].read().decode() + post = dict(urllib.parse.parse_qsl(post_data)) + if post["add_nonce"] not in self.server.add_nonces: + return self.error403("Add nonce error.") + self.server.add_nonces.remove(post["add_nonce"]) + SiteManager.site_manager.need(post["address"]) + return self.actionRedirect(post["url"]) + + @helper.encodeResponse + def actionSiteAddPrompt(self, path): + path_parts = self.parsePath(path) + if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]): + return self.error404(path) + + self.sendHeader(200, "text/html", noscript=True) + template = open("src/Ui/template/site_add.html").read() + template = template.replace("{url}", html.escape(self.env["PATH_INFO"])) + template = template.replace("{address}", path_parts["address"]) + template = template.replace("{add_nonce}", self.getAddNonce()) + return template + + def replaceHtmlVariables(self, block, path_parts): + user = self.getCurrentUser() + themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) + block = block.replace(b"{themeclass}", themeclass.encode("utf8")) + + if path_parts: + site = self.server.sites.get(path_parts.get("address")) + if site.settings["own"]: + modified = int(time.time()) + else: + modified = int(site.content_manager.contents["content.json"]["modified"]) + block = block.replace(b"{site_modified}", str(modified).encode("utf8")) + + return block + + # Stream a file to client + def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers={}, file_size=None, file_obj=None, path_parts=None): + file_name = os.path.basename(file_path) + + if file_size is None: + file_size = helper.getFilesize(file_path) + + if file_size is not None: + # Try to figure out content type by extension + content_type = self.getContentType(file_name) + + range = self.env.get("HTTP_RANGE") + range_start = None + + is_html_file = file_name.endswith(".html") + if is_html_file: + header_length = False + + if send_header: + extra_headers = extra_headers.copy() + content_encoding = self.get.get("zeronet_content_encoding", "") + if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")): + extra_headers["Content-Encoding"] = content_encoding + extra_headers["Accept-Ranges"] = "bytes" + if header_length: + extra_headers["Content-Length"] = str(file_size) + if range: + range_start = int(re.match(".*?([0-9]+)", range).group(1)) + if re.match(".*?-([0-9]+)", range): + range_end = int(re.match(".*?-([0-9]+)", range).group(1)) + 1 + else: + range_end = file_size + extra_headers["Content-Length"] = str(range_end - range_start) + extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size) + if range: + status = 206 + else: + status = 200 + self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers) + if self.env["REQUEST_METHOD"] != "OPTIONS": + if not file_obj: + file_obj = open(file_path, "rb") + + if range_start: + file_obj.seek(range_start) + while 1: + try: + block = file_obj.read(block_size) + if is_html_file: + block = self.replaceHtmlVariables(block, path_parts) + if block: + yield block + else: + raise StopIteration + except StopIteration: + 
file_obj.close() + break + else: # File not exists + for part in self.error404(str(file_path)): + yield part + + # On websocket connection + def actionWebsocket(self): + ws = self.env.get("wsgi.websocket") + + if ws: + # Allow only same-origin websocket requests + origin = self.env.get("HTTP_ORIGIN") + host = self.env.get("HTTP_HOST") + # Allow only same-origin websocket requests + if origin: + origin_host = origin.split("://", 1)[-1] + if origin_host != host and origin_host not in self.server.allowed_ws_origins: + error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins) + ws.send(json.dumps({"error": error_message})) + return self.error403(error_message) + + # Find site by wrapper_key + wrapper_key = self.get["wrapper_key"] + site = None + for site_check in list(self.server.sites.values()): + if site_check.settings["wrapper_key"] == wrapper_key: + site = site_check + + if site: # Correct wrapper key + try: + user = self.getCurrentUser() + except Exception as err: + ws.send(json.dumps({"error": "Error in data/user.json: %s" % err})) + return self.error500("Error in data/user.json: %s" % err) + if not user: + ws.send(json.dumps({"error": "No user found"})) + return self.error403("No user found") + ui_websocket = UiWebsocket(ws, site, self.server, user, self) + site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events + self.server.websockets.append(ui_websocket) + ui_websocket.start() + self.server.websockets.remove(ui_websocket) + for site_check in list(self.server.sites.values()): + # Remove websocket from every site (admin sites allowed to join other sites event channels) + if ui_websocket in site_check.websockets: + site_check.websockets.remove(ui_websocket) + return [b"Bye."] + else: # No site found by wrapper key + ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key})) + return self.error403("Wrapper key not found: %s" % wrapper_key) + else: + self.start_response("400 Bad Request", []) + return [b"Not a websocket request!"] + + # Debug last error + def actionDebug(self): + # Raise last error from DebugHook + import main + last_error = main.DebugHook.last_error + if last_error: + raise last_error[0](last_error[1]).with_traceback(last_error[2]) + else: + self.sendHeader() + return [b"No error! :)"] + + # Just raise an error to get console + def actionConsole(self): + import sys + sites = self.server.sites + main = sys.modules["main"] + + def bench(code, times=100, init=None): + sites = self.server.sites + main = sys.modules["main"] + s = time.time() + if init: + eval(compile(init, '', 'exec'), globals(), locals()) + for _ in range(times): + back = eval(code, globals(), locals()) + return ["%s run: %.3fs" % (times, time.time() - s), back] + raise Exception("Here is your console") + + # - Tests - + + def actionTestStream(self): + self.sendHeader() + yield " " * 1080 # Overflow browser's buffer + yield "He" + time.sleep(1) + yield "llo!" 
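The Range arithmetic actionFile() applies above is compact enough to restate on its own. A hypothetical standalone sketch (parse_range is illustrative and not defined anywhere in this patch) of how "bytes=100-199" becomes a start offset, an exclusive end offset, and a Content-Range header:

    import re

    def parse_range(range_header, file_size):
        # "bytes=100-199" -> seek to 100, read up to 200 (exclusive)
        range_start = int(re.match(".*?([0-9]+)", range_header).group(1))
        end_match = re.match(".*?-([0-9]+)", range_header)
        if end_match:
            range_end = int(end_match.group(1)) + 1
        else:
            range_end = file_size  # open-ended range: serve the rest of the file
        content_range = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size)
        return range_start, range_end, content_range

    assert parse_range("bytes=100-199", 1000) == (100, 200, "bytes 100-199/1000")
    assert parse_range("bytes=500-", 1000) == (500, 1000, "bytes 500-999/1000")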
+ # yield "Running websockets: %s" % len(self.server.websockets) + # self.server.sendMessage("Hello!") + + # - Errors - + + # Send bad request error + def error400(self, message=""): + self.sendHeader(400, noscript=True) + self.log.error("Error 400: %s" % message) + return self.formatError("Bad Request", message) + + # You are not allowed to access this + def error403(self, message="", details=True): + self.sendHeader(403, noscript=True) + self.log.warning("Error 403: %s" % message) + return self.formatError("Forbidden", message, details=details) + + # Send file not found error + def error404(self, path=""): + self.sendHeader(404, noscript=True) + return self.formatError("Not Found", path, details=False) + + # Internal server error + def error500(self, message=":("): + self.sendHeader(500, noscript=True) + self.log.error("Error 500: %s" % message) + return self.formatError("Server error", message) + + @helper.encodeResponse + def formatError(self, title, message, details=True): + import sys + import gevent + + if details and config.debug: + details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key} + details["version_zeronet"] = "%s r%s" % (config.version, config.rev) + details["version_python"] = sys.version + details["version_gevent"] = gevent.__version__ + details["plugins"] = PluginManager.plugin_manager.plugin_names + arguments = {key: val for key, val in vars(config.arguments).items() if "password" not in key} + details["arguments"] = arguments + return """ + +
<h1>%s</h1> + <h2>%s</h2> + <h3>Please report it if you think this an error.</h3> + <h4>Details:</h4> + <pre>%s</pre>
+ """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True))) + else: + return """ + +
<h1>%s</h1> + <h2>%s</h2>
+ """ % (title, html.escape(message)) diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py new file mode 100644 index 00000000..6cd0545c --- /dev/null +++ b/src/Ui/UiServer.py @@ -0,0 +1,206 @@ +import logging +import time +import urllib +import socket +import gevent + +from gevent.pywsgi import WSGIServer +from lib.gevent_ws import WebSocketHandler + +from .UiRequest import UiRequest +from Site import SiteManager +from Config import config +from Debug import Debug +import importlib + + +# Skip websocket handler if not necessary +class UiWSGIHandler(WebSocketHandler): + + def __init__(self, *args, **kwargs): + self.server = args[2] + super(UiWSGIHandler, self).__init__(*args, **kwargs) + self.args = args + self.kwargs = kwargs + + def handleError(self, err): + if config.debug: # Allow websocket errors to appear on /Debug + import main + main.DebugHook.handleError() + else: + ui_request = UiRequest(self.server, {}, self.environ, self.start_response) + block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err)) + for block in block_gen: + self.write(block) + + def run_application(self): + err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler" + try: + super(UiWSGIHandler, self).run_application() + except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err: + logging.warning("%s connection error: %s" % (err_name, err)) + except Exception as err: + logging.warning("%s error: %s" % (err_name, Debug.formatException(err))) + self.handleError(err) + + def handle(self): + # Save socket to be able to close them properly on exit + self.server.sockets[self.client_address] = self.socket + super(UiWSGIHandler, self).handle() + del self.server.sockets[self.client_address] + + +class UiServer: + def __init__(self): + self.ip = config.ui_ip + self.port = config.ui_port + self.running = False + if self.ip == "*": + self.ip = "0.0.0.0" # Bind all + if config.ui_host: + self.allowed_hosts = set(config.ui_host) + elif config.ui_ip == "127.0.0.1": + # IP Addresses are inherently allowed as they are immune to DNS + # rebinding attacks. + self.allowed_hosts = set(["zero", "localhost:%s" % config.ui_port]) + # "URI producers and normalizers should omit the port component and + # its ':' delimiter if port is empty or if its value would be the + # same as that of the scheme's default." + # Source: https://tools.ietf.org/html/rfc3986#section-3.2.3 + # As a result, we need to support portless hosts if port 80 is in + # use. 
+ if config.ui_port == 80: + self.allowed_hosts.update(["localhost"]) + else: + self.allowed_hosts = set([]) + self.allowed_ws_origins = set() + self.allow_trans_proxy = config.ui_trans_proxy + + self.wrapper_nonces = [] + self.add_nonces = [] + self.websockets = [] + self.site_manager = SiteManager.site_manager + self.sites = SiteManager.site_manager.list() + self.log = logging.getLogger(__name__) + config.error_logger.onNewRecord = self.handleErrorLogRecord + + def handleErrorLogRecord(self, record): + self.updateWebsocket(log_event=record.levelname) + + # After WebUI started + def afterStarted(self): + from util import Platform + Platform.setMaxfilesopened(config.max_files_opened) + + # Handle WSGI request + def handleRequest(self, env, start_response): + path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8") + if env.get("QUERY_STRING"): + get = dict(urllib.parse.parse_qsl(env['QUERY_STRING'])) + else: + get = {} + ui_request = UiRequest(self, get, env, start_response) + if config.debug: # Let the exception catched by werkezung + return ui_request.route(path) + else: # Catch and display the error + try: + return ui_request.route(path) + except Exception as err: + logging.debug("UiRequest error: %s" % Debug.formatException(err)) + return ui_request.error500("Err: %s" % Debug.formatException(err)) + + # Reload the UiRequest class to prevent restarts in debug mode + def reload(self): + global UiRequest + import imp + import sys + importlib.reload(sys.modules["User.UserManager"]) + importlib.reload(sys.modules["Ui.UiWebsocket"]) + UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest + # UiRequest.reload() + + # Bind and run the server + def start(self): + self.running = True + handler = self.handleRequest + + if config.debug: + # Auto reload UiRequest on change + from Debug import DebugReloader + DebugReloader.watcher.addCallback(self.reload) + + # Werkzeug Debugger + try: + from werkzeug.debug import DebuggedApplication + handler = DebuggedApplication(self.handleRequest, evalex=True) + except Exception as err: + self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err) + from Debug import DebugReloader + self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log + self.log.info("--------------------------------------") + if ":" in config.ui_ip: + self.log.info("Web interface: http://[%s]:%s/" % (config.ui_ip, config.ui_port)) + else: + self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) + self.log.info("--------------------------------------") + + if config.open_browser and config.open_browser != "False": + logging.info("Opening browser: %s...", config.open_browser) + import webbrowser + try: + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) + gevent.spawn_later(0.3, browser.open, url, new=2) + except Exception as err: + print("Error starting browser: %s" % err) + + self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log) + self.server.sockets = {} + self.afterStarted() + try: + self.server.serve_forever() + except Exception as err: + self.log.error("Web interface bind error, must be running already, exiting.... 
%s" % err) + import main + main.file_server.stop() + self.log.info("Stopped.") + + def stop(self): + self.log.debug("Stopping...") + # Close WS sockets + if "clients" in dir(self.server): + for client in list(self.server.clients.values()): + client.ws.close() + # Close http sockets + sock_closed = 0 + for sock in list(self.server.sockets.values()): + try: + sock.send(b"bye") + sock.shutdown(socket.SHUT_RDWR) + # sock._sock.close() + # sock.close() + sock_closed += 1 + except Exception as err: + self.log.debug("Http connection close error: %s" % err) + self.log.debug("Socket closed: %s" % sock_closed) + time.sleep(0.1) + if config.debug: + from Debug import DebugReloader + DebugReloader.watcher.stop() + + self.server.socket.close() + self.server.stop() + self.running = False + time.sleep(1) + + def updateWebsocket(self, **kwargs): + if kwargs: + param = {"event": list(kwargs.items())[0]} + else: + param = None + + for ws in self.websockets: + ws.event("serverChanged", param) diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py new file mode 100644 index 00000000..80d53b45 --- /dev/null +++ b/src/Ui/UiWebsocket.py @@ -0,0 +1,1297 @@ +import json +import time +import sys +import os +import shutil +import re +import copy +import logging +import stat + +import gevent + +from Config import config +from Site import SiteManager +from Crypt import CryptBitcoin +from Debug import Debug +from util import QueryJson, RateLimit +from Plugin import PluginManager +from Translate import translate as _ +from util import helper +from util import SafeRe +from util.Flag import flag +from Content.ContentManager import VerifyError, SignError + + +@PluginManager.acceptPlugins +class UiWebsocket(object): + def __init__(self, ws, site, server, user, request): + self.ws = ws + self.site = site + self.user = user + self.log = site.log + self.request = request + self.permissions = [] + self.server = server + self.next_message_id = 1 + self.waiting_cb = {} # Waiting for callback. 
Key: message_id, Value: function pointer + self.channels = [] # Channels joined to + self.state = {"sending": False} # Shared state of websocket connection + self.send_queue = [] # Messages to send to client + + # Start listener loop + def start(self): + ws = self.ws + if self.site.address == config.homepage and not self.site.page_requested: + # Add open fileserver port message or closed port error to homepage at first request after start + self.site.page_requested = True # Dont add connection notification anymore + import main + file_server = main.file_server + if not file_server.port_opened or file_server.tor_manager.start_onions is None: + self.site.page_requested = False # Not ready yet, check next time + else: + try: + self.addHomepageNotifications() + except Exception as err: + self.log.error("Uncaught Exception: " + Debug.formatException(err)) + + for notification in self.site.notifications: # Send pending notification messages + # send via WebSocket + self.cmd("notification", notification) + # just in case, log them to terminal + if notification[0] == "error": + self.log.error("\n*** %s\n" % self.dedent(notification[1])) + + self.site.notifications = [] + + while True: + try: + if ws.closed: + break + else: + message = ws.receive() + except Exception as err: + self.log.error("WebSocket receive error: %s" % Debug.formatException(err)) + break + + if message: + try: + req = json.loads(message) + self.handleRequest(req) + except Exception as err: + if config.debug: # Allow websocket errors to appear on /Debug + import main + main.DebugHook.handleError() + self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message)) + if not self.hasPlugin("Multiuser"): + self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) + + self.onClosed() + + def onClosed(self): + pass + + def dedent(self, text): + return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace("
", " ")) + + def addHomepageNotifications(self): + if not(self.hasPlugin("Multiuser")) and not(self.hasPlugin("UiPassword")): + bind_ip = getattr(config, "ui_ip", "") + whitelist = getattr(config, "ui_restrict", []) + # binds to the Internet, no IP whitelist, no UiPassword, no Multiuser + if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist): + self.site.notifications.append([ + "error", + _("You are not going to set up a public gateway. However, your Web UI is
" + + "open to the whole Internet.
" + + "Please check your configuration.") + ]) + + def hasPlugin(self, name): + return name in PluginManager.plugin_manager.plugin_names + + # Has permission to run the command + def hasCmdPermission(self, cmd): + flags = flag.db.get(self.getCmdFuncName(cmd), ()) + if "admin" in flags and "ADMIN" not in self.permissions: + return False + else: + return True + + # Has permission to access a site + def hasSitePermission(self, address, cmd=None): + if address != self.site.address and "ADMIN" not in self.site.settings["permissions"]: + return False + else: + return True + + def hasFilePermission(self, inner_path): + valid_signers = self.site.content_manager.getValidSigners(inner_path) + return self.site.settings["own"] or self.user.getAuthAddress(self.site.address) in valid_signers + + # Event in a channel + def event(self, channel, *params): + if channel in self.channels: # We are joined to channel + if channel == "siteChanged": + site = params[0] + site_info = self.formatSiteInfo(site, create_user=False) + if len(params) > 1 and params[1]: # Extra data + site_info.update(params[1]) + self.cmd("setSiteInfo", site_info) + elif channel == "serverChanged": + server_info = self.formatServerInfo() + if len(params) > 0 and params[0]: # Extra data + server_info.update(params[0]) + self.cmd("setServerInfo", server_info) + elif channel == "announcerChanged": + site = params[0] + announcer_info = self.formatAnnouncerInfo(site) + if len(params) > 1 and params[1]: # Extra data + announcer_info.update(params[1]) + self.cmd("setAnnouncerInfo", announcer_info) + + # Send response to client (to = message.id) + def response(self, to, result): + self.send({"cmd": "response", "to": to, "result": result}) + + # Send a command + def cmd(self, cmd, params={}, cb=None): + self.send({"cmd": cmd, "params": params}, cb) + + # Encode to json and send message + def send(self, message, cb=None): + message["id"] = self.next_message_id # Add message id to allow response + self.next_message_id += 1 + if cb: # Callback after client responded + self.waiting_cb[message["id"]] = cb + self.send_queue.append(message) + if self.state["sending"]: + return # Already sending + try: + while self.send_queue: + self.state["sending"] = True + message = self.send_queue.pop(0) + self.ws.send(json.dumps(message)) + self.state["sending"] = False + except Exception as err: + self.log.debug("Websocket send error: %s" % Debug.formatException(err)) + self.state["sending"] = False + + def getPermissions(self, req_id): + permissions = self.site.settings["permissions"] + if req_id >= 1000000: # Its a wrapper command, allow admin commands + permissions = permissions[:] + permissions.append("ADMIN") + return permissions + + def asyncWrapper(self, func): + def asyncErrorWatcher(func, *args, **kwargs): + try: + result = func(*args, **kwargs) + if result is not None: + self.response(args[0], result) + except Exception as err: + if config.debug: # Allow websocket errors to appear on /Debug + import main + main.DebugHook.handleError() + self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) + self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) + + def wrapper(*args, **kwargs): + gevent.spawn(asyncErrorWatcher, func, *args, **kwargs) + return wrapper + + def getCmdFuncName(self, cmd): + func_name = "action" + cmd[0].upper() + cmd[1:] + return func_name + + # Handle incoming messages + def handleRequest(self, req): + + cmd = req.get("cmd") + params = req.get("params") + self.permissions = 
self.getPermissions(req["id"]) + + if cmd == "response": # It's a response to a command + return self.actionResponse(req["to"], req["result"]) + else: # Normal command + func_name = self.getCmdFuncName(cmd) + func = getattr(self, func_name, None) + if self.site.settings.get("deleting"): + return self.response(req["id"], {"error": "Site is deleting"}) + + if not func: # Unknown command + return self.response(req["id"], {"error": "Unknown command: %s" % cmd}) + + if not self.hasCmdPermission(cmd): # Admin commands + return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) + + # Execute in parallel + func_flags = flag.db.get(self.getCmdFuncName(cmd), ()) + if func_flags and "async_run" in func_flags: + func = self.asyncWrapper(func) + + # Support calling as named, unnamed parameters and raw first argument too + if type(params) is dict: + result = func(req["id"], **params) + elif type(params) is list: + result = func(req["id"], *params) + elif params: + result = func(req["id"], params) + else: + result = func(req["id"]) + + if result is not None: + self.response(req["id"], result) + + # Format site info + def formatSiteInfo(self, site, create_user=True): + content = site.content_manager.contents.get("content.json", {}) + if content: # Remove unnecessary data transfer + content = content.copy() + content["files"] = len(content.get("files", {})) + content["files_optional"] = len(content.get("files_optional", {})) + content["includes"] = len(content.get("includes", {})) + if "sign" in content: + del(content["sign"]) + if "signs" in content: + del(content["signs"]) + if "signers_sign" in content: + del(content["signers_sign"]) + + settings = site.settings.copy() + del settings["wrapper_key"] # Dont expose wrapper key + + ret = { + "auth_address": self.user.getAuthAddress(site.address, create=create_user), + "cert_user_id": self.user.getCertUserId(site.address), + "address": site.address, + "address_short": site.address_short, + "address_hash": site.address_hash.hex(), + "settings": settings, + "content_updated": site.content_updated, + "bad_files": len(site.bad_files), + "size_limit": site.getSizeLimit(), + "next_size_limit": site.getNextSizeLimit(), + "peers": max(site.settings.get("peers", 0), len(site.peers)), + "started_task_num": site.worker_manager.started_task_num, + "tasks": len(site.worker_manager.tasks), + "workers": len(site.worker_manager.workers), + "content": content + } + if site.settings["own"]: + ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey")) + if site.isServing() and content: + ret["peers"] += 1 # Add myself if serving + return ret + + def formatServerInfo(self): + import main + file_server = main.file_server + if file_server.port_opened == {}: + ip_external = None + else: + ip_external = any(file_server.port_opened.values()) + back = { + "ip_external": ip_external, + "port_opened": file_server.port_opened, + "platform": sys.platform, + "fileserver_ip": config.fileserver_ip, + "fileserver_port": config.fileserver_port, + "tor_enabled": file_server.tor_manager.enabled, + "tor_status": file_server.tor_manager.status, + "tor_has_meek_bridges": file_server.tor_manager.has_meek_bridges, + "tor_use_bridges": config.tor_use_bridges, + "ui_ip": config.ui_ip, + "ui_port": config.ui_port, + "version": config.version, + "rev": config.rev, + "timecorrection": file_server.timecorrection, + "language": config.language, + "debug": config.debug, + "offline": config.offline, + "plugins": 
PluginManager.plugin_manager.plugin_names, + "plugins_rev": PluginManager.plugin_manager.plugins_rev, + "user_settings": self.user.settings + } + if "ADMIN" in self.site.settings["permissions"]: + back["updatesite"] = config.updatesite + back["dist_type"] = config.dist_type + back["lib_verify_best"] = CryptBitcoin.lib_verify_best + back["passive_mode"] = file_server.passive_mode + return back + + def formatAnnouncerInfo(self, site): + return {"address": site.address, "stats": site.announcer.stats} + + # - Actions - + + def actionAs(self, to, address, cmd, params=[]): + if not self.hasSitePermission(address, cmd=cmd): + return self.response(to, "No permission for site %s" % address) + req_self = copy.copy(self) + req_self.site = self.server.sites.get(address) + req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site + req_obj = super(UiWebsocket, req_self) + req = {"id": to, "cmd": cmd, "params": params} + req_obj.handleRequest(req) + + # Do callback on response {"cmd": "response", "to": message_id, "result": result} + def actionResponse(self, to, result): + if to in self.waiting_cb: + self.waiting_cb[to](result) # Call callback function + else: + self.log.error("Websocket callback not found: %s, %s" % (to, result)) + + # Send a simple pong answer + def actionPing(self, to): + self.response(to, "pong") + + # Send site details + def actionSiteInfo(self, to, file_status=None): + ret = self.formatSiteInfo(self.site) + if file_status: # Client queries file status + if self.site.storage.isFile(file_status): # File exist, add event done + ret["event"] = ("file_done", file_status) + self.response(to, ret) + + def actionSiteBadFiles(self, to): + return list(self.site.bad_files.keys()) + + # Join to an event channel + def actionChannelJoin(self, to, channels): + if type(channels) != list: + channels = [channels] + + for channel in channels: + if channel not in self.channels: + self.channels.append(channel) + + self.response(to, "ok") + + # Server variables + def actionServerInfo(self, to): + back = self.formatServerInfo() + self.response(to, back) + + # Create a new wrapper nonce that allows to load html file + @flag.admin + def actionServerGetWrapperNonce(self, to): + wrapper_nonce = self.request.getWrapperNonce() + self.response(to, wrapper_nonce) + + def actionAnnouncerInfo(self, to): + back = self.formatAnnouncerInfo(self.site) + self.response(to, back) + + @flag.admin + def actionAnnouncerStats(self, to): + back = {} + trackers = self.site.announcer.getTrackers() + for site in list(self.server.sites.values()): + for tracker, stats in site.announcer.stats.items(): + if tracker not in trackers: + continue + if tracker not in back: + back[tracker] = {} + is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"]) + for key, val in stats.items(): + if key.startswith("num_"): + back[tracker][key] = back[tracker].get(key, 0) + val + elif is_latest_data: + back[tracker][key] = val + + return back + + # Sign content.json + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", remove_missing_optional=False, update_changed_files=False, response_ok=True): + self.log.debug("Signing: %s" % inner_path) + site = self.site + extend = {} # Extended info for signing + + # Change to the file's content.json + file_info = site.content_manager.getFileInfo(inner_path) + if not inner_path.endswith("content.json"): + if not file_info: + raise Exception("Invalid content.json file: %s" % inner_path) + inner_path = 
file_info["content_inner_path"] + + # Add certificate to user files + is_user_content = file_info and ("cert_signers" in file_info or "cert_signers_pattern" in file_info) + if is_user_content and privatekey is None: + cert = self.user.getCert(self.site.address) + extend["cert_auth_type"] = cert["auth_type"] + extend["cert_user_id"] = self.user.getCertUserId(site.address) + extend["cert_sign"] = cert["cert_sign"] + self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) + + if not self.hasFilePermission(inner_path): + self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.") + return self.response(to, {"error": "Forbidden, you can only modify your own sites"}) + + if privatekey == "stored": # Get privatekey from sites.json + privatekey = self.user.getSiteData(self.site.address).get("privatekey") + if not privatekey: + self.cmd("notification", ["error", _["Content signing failed"] + "
Private key not found in sites.json "]) + self.response(to, {"error": "Site sign failed: Private key not stored."}) + self.log.error("Site sign failed: %s: Private key not stored in sites.json" % inner_path) + return + if not privatekey: # Get privatekey from users.json auth_address + privatekey = self.user.getAuthPrivatekey(self.site.address) + + # Signing + # Reload content.json, ignore errors to make it up-to-date + site.content_manager.loadContent(inner_path, add_bad_files=False, force=True) + # Sign using private key sent by user + try: + site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files, remove_missing_optional=remove_missing_optional) + except (VerifyError, SignError) as err: + self.cmd("notification", ["error", _["Content signing failed"] + "
%s" % err]) + self.response(to, {"error": "Site sign failed: %s" % err}) + self.log.error("Site sign failed: %s: %s" % (inner_path, Debug.formatException(err))) + return + except Exception as err: + self.cmd("notification", ["error", _["Content signing error"] + "
%s" % Debug.formatException(err)]) + self.response(to, {"error": "Site sign error: %s" % Debug.formatException(err)}) + self.log.error("Site sign error: %s: %s" % (inner_path, Debug.formatException(err))) + return + + site.content_manager.loadContent(inner_path, add_bad_files=False) # Load new content.json, ignore errors + + if update_changed_files: + self.site.updateWebsocket(file_done=inner_path) + + if response_ok: + self.response(to, "ok") + else: + return inner_path + + # Sign and publish content.json + def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True, remove_missing_optional=False, update_changed_files=False): + if sign: + inner_path = self.actionSiteSign( + to, privatekey, inner_path, response_ok=False, + remove_missing_optional=remove_missing_optional, update_changed_files=update_changed_files + ) + if not inner_path: + return + # Publishing + if not self.site.settings["serving"]: # Enable site if paused + self.site.settings["serving"] = True + self.site.saveSettings() + self.site.announce() + + if inner_path not in self.site.content_manager.contents: + return self.response(to, {"error": "File %s not found" % inner_path}) + + event_name = "publish %s %s" % (self.site.address, inner_path) + called_instantly = RateLimit.isAllowed(event_name, 30) + thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, inner_path) # Only publish once in 30 seconds + notification = "linked" not in dir(thread) # Only display notification on first callback + thread.linked = True + if called_instantly: # Allowed to call instantly + # At the end callback with request id and thread + self.cmd("progress", ["publish", _["Content published to {0}/{1} peers."].format(0, 5), 0]) + thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=notification)) + else: + self.cmd( + "notification", + ["info", _["Content publish queued for {0:.0f} seconds."].format(RateLimit.delayLeft(event_name, 30)), 5000] + ) + self.response(to, "ok") + # At the end display notification + thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=False)) + + def doSitePublish(self, site, inner_path): + def cbProgress(published, limit): + progress = int(float(published) / limit * 100) + self.cmd("progress", [ + "publish", + _["Content published to {0}/{1} peers."].format(published, limit), + progress + ]) + diffs = site.content_manager.getDiffs(inner_path) + back = site.publish(limit=5, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) + if back == 0: # Failed to publish to anyone + self.cmd("progress", ["publish", _["Content publish failed."], -100]) + else: + cbProgress(back, back) + return back + + # Callback of site publish + def cbSitePublish(self, to, site, thread, notification=True, callback=True): + published = thread.value + if published > 0: # Successfully published + if notification: + # self.cmd("notification", ["done", _["Content published to {0} peers."].format(published), 5000]) + site.updateWebsocket() # Send updated site data to local websocket clients + if callback: + self.response(to, "ok") + else: + if len(site.peers) == 0: + import main + if any(main.file_server.port_opened.values()) or main.file_server.tor_manager.start_onions: + if notification: + self.cmd("notification", ["info", _["No peers found, but your content is ready to access."]]) + if callback: + self.response(to, "ok") + else: + if notification: + self.cmd("notification", [ + "info", + _("""{_[Your network connection is 
restricted. Please, open {0} port]}
+ {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port) + ]) + if callback: + self.response(to, {"error": "Port not opened."}) + + else: + if notification: + self.response(to, {"error": "Content publish failed."}) + + def actionSiteReload(self, to, inner_path): + self.site.content_manager.loadContent(inner_path, add_bad_files=False) + self.site.storage.verifyFiles(quick_check=True) + self.site.updateWebsocket() + return "ok" + + # Write a file to disk + def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False): + valid_signers = self.site.content_manager.getValidSigners(inner_path) + auth_address = self.user.getAuthAddress(self.site.address) + if not self.hasFilePermission(inner_path): + self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) + return self.response(to, {"error": "Forbidden, you can only modify your own files"}) + + # Try not to overwrite files currently in sync + content_inner_path = re.sub("^(.*)/.*?$", "\\1/content.json", inner_path) # Also check the content.json from same directory + if (self.site.bad_files.get(inner_path) or self.site.bad_files.get(content_inner_path)) and not ignore_bad_files: + found = self.site.needFile(inner_path, update=True, priority=10) + if not found: + self.cmd( + "confirm", + [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]], + lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) + ) + return False + + try: + import base64 + content = base64.b64decode(content_base64) + # Save old file to generate patch later + if ( + inner_path.endswith(".json") and not inner_path.endswith("content.json") and + self.site.storage.isFile(inner_path) and not self.site.storage.isFile(inner_path + "-old") + ): + try: + self.site.storage.rename(inner_path, inner_path + "-old") + except Exception: + # Rename failed, fall back to standard file write + f_old = self.site.storage.open(inner_path, "rb") + f_new = self.site.storage.open(inner_path + "-old", "wb") + shutil.copyfileobj(f_old, f_new) + + self.site.storage.write(inner_path, content) + except Exception as err: + self.log.error("File write error: %s" % Debug.formatException(err)) + return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)}) + + if inner_path.endswith("content.json"): + self.site.content_manager.loadContent(inner_path, add_bad_files=False, force=True) + + self.response(to, "ok") + + # Send sitechanged to other local users + for ws in self.site.websockets: + if ws != self: + ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]}) + + def actionFileDelete(self, to, inner_path): + if not self.hasFilePermission(inner_path): + self.log.error("File delete error: you don't own this site & you are not approved by the owner.") + return self.response(to, {"error": "Forbidden, you can only modify your own files"}) + + need_delete = True + file_info = self.site.content_manager.getFileInfo(inner_path) + if file_info and file_info.get("optional"): + # Non-existing optional files won't be removed from content.json, so we have to do it manually + self.log.debug("Deleting optional file: %s" % inner_path) + relative_path = file_info["relative_path"] + content_json = self.site.storage.loadJson(file_info["content_inner_path"]) + if relative_path in content_json.get("files_optional", {}): + del content_json["files_optional"][relative_path] + 
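# Illustrative aside (not part of the original hunk; paths and values are hypothetical):
#
#     content_json["files_optional"]   # e.g. {"img/cat.jpg": {"sha512": "1f4a...", "size": 345678}}
#     del content_json["files_optional"]["img/cat.jpg"]
#
# Dropping the entry only edits the manifest; the file itself may still exist on
# disk, which is why need_delete is re-checked after the rewritten content.json
# is saved and reloaded below.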
self.site.storage.writeJson(file_info["content_inner_path"], content_json) + self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) + need_delete = self.site.storage.isFile(inner_path) # File still exists after removing from content.json (owned site) + + if need_delete: + try: + self.site.storage.delete(inner_path) + except Exception as err: + self.log.error("File delete error: %s" % err) + return self.response(to, {"error": "Delete error: %s" % Debug.formatExceptionMessage(err)}) + + self.response(to, "ok") + + # Send sitechanged to other local users + for ws in self.site.websockets: + if ws != self: + ws.event("siteChanged", self.site, {"event": ["file_deleted", inner_path]}) +
+ # Find data in json files + def actionFileQuery(self, to, dir_inner_path, query=None): + # s = time.time() + dir_path = self.site.storage.getPath(dir_inner_path) + rows = list(QueryJson.query(dir_path, query or "")) + # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s)) + return self.response(to, rows) +
+ # List files in directory + @flag.async_run + def actionFileList(self, to, inner_path): + try: + return list(self.site.storage.walk(inner_path)) + except Exception as err: + self.log.error("fileList %s error: %s" % (inner_path, Debug.formatException(err))) + return {"error": Debug.formatExceptionMessage(err)} +
+ # List directories in a directory + @flag.async_run + def actionDirList(self, to, inner_path, stats=False): + try: + if stats: + back = [] + for file_name in self.site.storage.list(inner_path): + file_stats = os.stat(self.site.storage.getPath(inner_path + "/" + file_name)) + is_dir = stat.S_ISDIR(file_stats.st_mode) + back.append( + {"name": file_name, "size": file_stats.st_size, "is_dir": is_dir} + ) + return back + else: + return list(self.site.storage.list(inner_path)) + except Exception as err: + self.log.error("dirList %s error: %s" % (inner_path, Debug.formatException(err))) + return {"error": Debug.formatExceptionMessage(err)} +
+ # SQL query + def actionDbQuery(self, to, query, params=None, wait_for=None): + if config.debug or config.verbose: + s = time.time() + rows = [] + try: + res = self.site.storage.query(query, params) + except Exception as err: # Respond to the client with the error + self.log.error("DbQuery error: %s" % Debug.formatException(err)) + return self.response(to, {"error": Debug.formatExceptionMessage(err)}) + # Convert result to dict + for row in res: + rows.append(dict(row)) + if config.verbose and time.time() - s > 0.1: # Log slow query + self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s)) + return self.response(to, rows) +
+ # Return file content + @flag.async_run + def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300, priority=6): + try: + if required or inner_path in self.site.bad_files: + with gevent.Timeout(timeout): + self.site.needFile(inner_path, priority=priority) + body = self.site.storage.read(inner_path, "rb") + except (Exception, gevent.Timeout) as err: + self.log.debug("%s fileGet error: %s" % (inner_path, Debug.formatException(err))) + body = None + + if not body: + body = None + elif format == "base64": + import base64 + body = base64.b64encode(body).decode() + else: + try: + body = body.decode() + except Exception as err: + self.response(to, {"error": "Error decoding text: %s" % err}) + self.response(to, body) +
+ @flag.async_run + def actionFileNeed(self, to, inner_path, timeout=300, priority=6): + try: + with gevent.Timeout(timeout): +
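# Illustrative note (not from the original file): gevent.Timeout raises inside
# this block once `timeout` seconds pass without the download finishing, and
# the except clause below converts that into an error response. Standalone,
# the same pattern looks like:
#
#     with gevent.Timeout(300):                     # hypothetical values
#         site.needFile("css/all.css", priority=6)  # blocks until downloaded or timed out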
self.site.needFile(inner_path, priority=priority) + except (Exception, gevent.Timeout) as err: + return self.response(to, {"error": Debug.formatExceptionMessage(err)}) + return self.response(to, "ok") + + def actionFileRules(self, to, inner_path, use_my_cert=False, content=None): + if not content: # No content defined by function call + content = self.site.content_manager.contents.get(inner_path) + + if not content: # File not created yet + cert = self.user.getCert(self.site.address) + if cert and cert["auth_address"] in self.site.content_manager.getValidSigners(inner_path): + # Current selected cert if valid for this site, add it to query rules + content = {} + content["cert_auth_type"] = cert["auth_type"] + content["cert_user_id"] = self.user.getCertUserId(self.site.address) + content["cert_sign"] = cert["cert_sign"] + + rules = self.site.content_manager.getRules(inner_path, content) + if inner_path.endswith("content.json") and rules: + if content: + rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())]) + else: + rules["current_size"] = 0 + return self.response(to, rules) + + # Add certificate to user + def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert): + try: + res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert) + if res is True: + self.cmd( + "notification", + ["done", _("{_[New certificate added]:} {auth_type}/{auth_user_name}@{domain}.")] + ) + self.user.setCert(self.site.address, domain) + self.site.updateWebsocket(cert_changed=domain) + self.response(to, "ok") + elif res is False: + # Display confirmation of change + cert_current = self.user.certs[domain] + body = _("{_[Your current certificate]:} {cert_current[auth_type]}/{cert_current[auth_user_name]}@{domain}") + self.cmd( + "confirm", + [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")], + lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) + ) + else: + self.response(to, "Not changed") + except Exception as err: + self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err))) + self.response(to, {"error": err.message}) + + def cbCertAddConfirm(self, to, domain, auth_type, auth_user_name, cert): + self.user.deleteCert(domain) + self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert) + self.cmd( + "notification", + ["done", _("Certificate changed to: {auth_type}/{auth_user_name}@{domain}.")] + ) + self.user.setCert(self.site.address, domain) + self.site.updateWebsocket(cert_changed=domain) + self.response(to, "ok") + + # Select certificate for site + def actionCertSelect(self, to, accepted_domains=[], accept_any=False, accepted_pattern=None): + accounts = [] + accounts.append(["", _["No certificate"], ""]) # Default option + active = "" # Make it active if no other option found + + # Add my certs + auth_address = self.user.getAuthAddress(self.site.address) # Current auth address + site_data = self.user.getSiteData(self.site.address) # Current auth address + + if not accepted_domains and not accepted_pattern: # Accept any if no filter defined + accept_any = True + + for domain, cert in list(self.user.certs.items()): + if auth_address == cert["auth_address"] and domain == site_data.get("cert"): + active = domain + title = cert["auth_user_name"] + "@" + domain + accepted_pattern_match = accepted_pattern and SafeRe.match(accepted_pattern, domain) + if domain in 
accepted_domains or accept_any or accepted_pattern_match: + accounts.append([domain, title, ""]) + else: + accounts.append([domain, title, "disabled"]) + + # Render the html + body = "<span style='padding-bottom: 5px; display: inline-block'>" + _["Select account you want to use in this site:"] + "</span>" + # Accounts + for domain, account, css_class in accounts: + if domain == active: + css_class += " active" # Currently selected option + title = _("<b>%s</b> <small>({_[currently selected]})</small>") % account + else: + title = "<b>%s</b>" % account + body += "<a href='#Select+account' class='select select-close cert %s' title='%s'>%s</a>" % (css_class, domain, title) + # More available providers + more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domains we have not displayed yet + if more_domains: + # body+= "<small style='margin-top: 10px; display: block'>Accepted authorization providers by the site:</small>" + body += "<div style='background-color: #F7F7F7; margin-right: -30px'>" + for domain in more_domains: + body += _(""" + <a href='/{domain}' target='_top' class='select'> + <small style='float: right; margin-right: 40px; margin-top: -1px'>{_[Register]} &raquo;</small>{domain} + </a> + """) + body += "</div>" + + script = """ + $(".notification .select.cert").on("click", function() { + $(".notification .select").removeClass('active') + zeroframe.response(%s, this.title) + return false + }) + """ % self.next_message_id + + self.cmd("notification", ["ask", body], lambda domain: self.actionCertSet(to, domain)) + self.cmd("injectScript", script) +
+ # - Admin actions - + + @flag.admin + def actionPermissionAdd(self, to, permission): + if permission not in self.site.settings["permissions"]: + self.site.settings["permissions"].append(permission) + self.site.saveSettings() + self.site.updateWebsocket(permission_added=permission) + self.response(to, "ok") +
+ @flag.admin + def actionPermissionRemove(self, to, permission): + self.site.settings["permissions"].remove(permission) + self.site.saveSettings() + self.site.updateWebsocket(permission_removed=permission) + self.response(to, "ok") +
+ @flag.admin + def actionPermissionDetails(self, to, permission): + if permission == "ADMIN": + self.response(to, _["Modify your client's configuration and access all site"] + " <span style='color: red'>" + _["(Dangerous!)"] + "</span>") + elif permission == "NOSANDBOX": + self.response(to, _["Modify your client's configuration and access all site"] + " <span style='color: red'>" + _["(Dangerous!)"] + "</span>") + elif permission == "PushNotification": + self.response(to, _["Send notifications"]) + else: + self.response(to, "") +
+ # Set the certificate used to authenticate the user for this site + @flag.admin + def actionCertSet(self, to, domain): + self.user.setCert(self.site.address, domain) + self.site.updateWebsocket(cert_changed=domain) + self.response(to, "ok") +
+ # List the user's certificates + @flag.admin + def actionCertList(self, to): + back = [] + auth_address = self.user.getAuthAddress(self.site.address) + for domain, cert in list(self.user.certs.items()): + back.append({ + "auth_address": cert["auth_address"], + "auth_type": cert["auth_type"], + "auth_user_name": cert["auth_user_name"], + "domain": domain, + "selected": cert["auth_address"] == auth_address + }) + return back +
+ # List all site info + @flag.admin + def actionSiteList(self, to, connecting_sites=False): + ret = [] + for site in list(self.server.sites.values()): + if not site.content_manager.contents.get("content.json") and not connecting_sites: + continue # Incomplete site + ret.append(self.formatSiteInfo(site, create_user=False)) # Don't generate the auth_address on listing + self.response(to, ret) +
+ # Join an event channel on all sites + @flag.admin + def actionChannelJoinAllsite(self, to, channel): + if channel not in self.channels: # Add channel to channels + self.channels.append(channel) + + for site in list(self.server.sites.values()): # Add websocket to every channel + if self not in site.websockets: + site.websockets.append(self) + + self.response(to, "ok") +
+ # Update site content.json + def actionSiteUpdate(self, to, address, check_files=False, verify_files=False, since=None, announce=False): + def updateThread(): + site.update(announce=announce, check_files=check_files, verify_files=verify_files, since=since) + self.response(to, "Updated") + + site = self.server.sites.get(address) + if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]): + if not site.settings["serving"]: + site.settings["serving"] = True + site.saveSettings() + + gevent.spawn(updateThread) + else: + self.response(to, {"error": "Unknown site: %s" % address}) +
+ # Pause site serving + @flag.admin + def actionSitePause(self, to, address): + site = self.server.sites.get(address) + if
site: + site.settings["serving"] = False + site.saveSettings() + site.updateWebsocket() + site.worker_manager.stopWorkers() + self.response(to, "Paused") + else: + self.response(to, {"error": "Unknown site: %s" % address}) + + # Resume site serving + @flag.admin + def actionSiteResume(self, to, address): + site = self.server.sites.get(address) + if site: + site.settings["serving"] = True + site.saveSettings() + gevent.spawn(site.update, announce=True) + time.sleep(0.001) # Wait for update thread starting + site.updateWebsocket() + self.response(to, "Resumed") + else: + self.response(to, {"error": "Unknown site: %s" % address}) + + @flag.admin + @flag.no_multiuser + def actionSiteDelete(self, to, address): + site = self.server.sites.get(address) + if site: + site.delete() + self.user.deleteSiteData(address) + self.response(to, "Deleted") + import gc + gc.collect(2) + else: + self.response(to, {"error": "Unknown site: %s" % address}) + + def cbSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): + self.cmd("notification", ["info", _["Cloning site..."]]) + site = self.server.sites.get(address) + response = {} + if target_address: + target_site = self.server.sites.get(target_address) + privatekey = self.user.getSiteData(target_site.address).get("privatekey") + site.clone(target_address, privatekey, root_inner_path=root_inner_path) + self.cmd("notification", ["done", _["Site source code upgraded!"]]) + site.publish() + response = {"address": target_address} + else: + # Generate a new site from user's bip32 seed + new_address, new_address_index, new_site_data = self.user.getNewSiteData() + new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index, root_inner_path=root_inner_path) + new_site.settings["own"] = True + new_site.saveSettings() + self.cmd("notification", ["done", _["Site cloned"]]) + if redirect: + self.cmd("redirect", "/%s" % new_address) + gevent.spawn(new_site.announce) + response = {"address": new_address} + self.response(to, response) + return "ok" + + @flag.no_multiuser + def actionSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): + if not SiteManager.site_manager.isAddress(address): + self.response(to, {"error": "Not a site: %s" % address}) + return + + if not self.server.sites.get(address): + # Don't expose site existence + return + + site = self.server.sites.get(address) + if site.bad_files: + for bad_inner_path in list(site.bad_files.keys()): + is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path) + if not is_user_file and bad_inner_path != "content.json": + self.cmd("notification", ["error", _["Clone error: Site still in sync"]]) + return {"error": "Site still in sync"} + + if "ADMIN" in self.getPermissions(to): + self.cbSiteClone(to, address, root_inner_path, target_address, redirect) + else: + self.cmd( + "confirm", + [_["Clone site %s?"] % address, _["Clone"]], + lambda res: self.cbSiteClone(to, address, root_inner_path, target_address, redirect) + ) + + @flag.admin + @flag.no_multiuser + def actionSiteSetLimit(self, to, size_limit): + self.site.settings["size_limit"] = int(size_limit) + self.site.saveSettings() + self.response(to, "ok") + self.site.updateWebsocket() + self.site.download(blind_includes=True) + + @flag.admin + def actionSiteAdd(self, to, address): + site_manager = SiteManager.site_manager + if address in site_manager.sites: + return {"error": "Site already added"} + else: + if site_manager.need(address): + return "ok" + else: 
+ return {"error": "Invalid address"} + + @flag.async_run + def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"): + content = self.site.content_manager.contents.get(content_inner_path) + if not content: + return {"error": "content file not avaliable"} + + min_mtime = content.get("modified", 0) + site_path = self.site.storage.directory + modified_files = [] + + # Load cache if not signed since last modified check + if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check", 0): + min_mtime = self.site.settings["cache"].get("time_modified_files_check") + modified_files = self.site.settings["cache"].get("modified_files", []) + + inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys()) + + if len(inner_paths) > 100: + return {"error": "Too many files in content.json"} + + for relative_inner_path in inner_paths: + inner_path = helper.getDirname(content_inner_path) + relative_inner_path + try: + is_mtime_newer = os.path.getmtime(self.site.storage.getPath(inner_path)) > min_mtime + 1 + if is_mtime_newer: + if inner_path.endswith("content.json"): + is_modified = self.site.content_manager.isModified(inner_path) + else: + previous_size = content["files"][inner_path]["size"] + is_same_size = self.site.storage.getSize(inner_path) == previous_size + ext = inner_path.rsplit(".", 1)[-1] + is_text_file = ext in ["json", "txt", "html", "js", "css"] + if is_same_size: + if is_text_file: + is_modified = self.site.content_manager.isModified(inner_path) # Check sha512 hash + else: + is_modified = False + else: + is_modified = True + + # Check ran, modified back to original value, but in the cache + if not is_modified and inner_path in modified_files: + modified_files.remove(inner_path) + else: + is_modified = False + except Exception as err: + if not self.site.storage.isFile(inner_path): # File deleted + is_modified = True + else: + raise err + if is_modified and inner_path not in modified_files: + modified_files.append(inner_path) + + self.site.settings["cache"]["time_modified_files_check"] = time.time() + self.site.settings["cache"]["modified_files"] = modified_files + return {"modified_files": modified_files} + + @flag.admin + def actionSiteSetSettingsValue(self, to, key, value): + if key not in ["modified_files_notification"]: + return {"error": "Can't change this key"} + + self.site.settings[key] = value + + return "ok" + + def actionUserGetSettings(self, to): + settings = self.user.sites.get(self.site.address, {}).get("settings", {}) + self.response(to, settings) + + def actionUserSetSettings(self, to, settings): + self.user.setSiteSettings(self.site.address, settings) + self.response(to, "ok") + + def actionUserGetGlobalSettings(self, to): + settings = self.user.settings + self.response(to, settings) + + @flag.admin + def actionUserSetGlobalSettings(self, to, settings): + self.user.settings = settings + self.user.save() + self.response(to, "ok") + + @flag.admin + @flag.no_multiuser + def actionServerErrors(self, to): + return config.error_logger.lines + + @flag.admin + @flag.no_multiuser + def actionServerUpdate(self, to): + def cbServerUpdate(res): + self.response(to, res) + if not res: + return False + for websocket in self.server.websockets: + websocket.cmd( + "notification", + ["info", _["Updating ZeroNet client, will be back in a few minutes..."], 20000] + ) + websocket.cmd("updating") + + import main + main.update_after_shutdown = True + main.restart_after_shutdown = True + 
SiteManager.site_manager.save() + main.file_server.stop() + main.ui_server.stop() + + self.cmd( + "confirm", + [_["Update ZeroNet client to latest version?"], _["Update"]], + cbServerUpdate + ) + + @flag.admin + @flag.async_run + @flag.no_multiuser + def actionServerPortcheck(self, to): + import main + file_server = main.file_server + file_server.portCheck() + self.response(to, file_server.port_opened) + + @flag.admin + @flag.no_multiuser + def actionServerSetPassiveMode(self, to, passive_mode=False): + import main + file_server = main.file_server + if file_server.isPassiveMode() != passive_mode: + file_server.setPassiveMode(passive_mode) + if file_server.isPassiveMode(): + self.cmd("notification", ["info", _["Passive mode enabled"], 5000]) + else: + self.cmd("notification", ["info", _["Passive mode disabled"], 5000]) + self.server.updateWebsocket() + + @flag.admin + @flag.no_multiuser + def actionServerSetOfflineMode(self, to, offline_mode=False): + import main + file_server = main.file_server + if file_server.isOfflineMode() != offline_mode: + file_server.setOfflineMode(offline_mode) + if file_server.isOfflineMode(): + self.cmd("notification", ["info", _["Offline mode enabled"], 5000]) + else: + self.cmd("notification", ["info", _["Offline mode disabled"], 5000]) + self.server.updateWebsocket() + + @flag.admin + @flag.no_multiuser + def actionServerShutdown(self, to, restart=False): + import main + def cbServerShutdown(res): + self.response(to, res) + if not res: + return False + if restart: + main.restart_after_shutdown = True + main.file_server.stop(ui_websocket=self) + main.ui_server.stop() + + if restart: + message = [_["Restart ZeroNet client?"], _["Restart"]] + else: + message = [_["Shut down ZeroNet client?"], _["Shut down"]] + self.cmd("confirm", message, cbServerShutdown) + + @flag.admin + @flag.no_multiuser + def actionServerShowdirectory(self, to, directory="backup", inner_path=""): + if self.request.env["REMOTE_ADDR"] != "127.0.0.1": + return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"}) + + import webbrowser + if directory == "backup": + path = os.path.abspath(config.data_dir) + elif directory == "log": + path = os.path.abspath(config.log_dir) + elif directory == "site": + path = os.path.abspath(self.site.storage.getPath(helper.getDirname(inner_path))) + + if os.path.isdir(path): + self.log.debug("Opening: %s" % path) + webbrowser.open('file://' + path) + return self.response(to, "ok") + else: + return self.response(to, {"error": "Not a directory"}) + + @flag.admin + @flag.no_multiuser + def actionConfigSet(self, to, key, value): + import main + + self.log.debug("Changing config %s value to %r" % (key, value)) + if key not in config.keys_api_change_allowed: + self.response(to, {"error": "Forbidden: You cannot set this config key"}) + return + + if key == "open_browser": + if value not in ["default_browser", "False"]: + self.response(to, {"error": "Forbidden: Invalid value"}) + return + + # Remove empty lines from lists + if type(value) is list: + value = [line for line in value if line] + + config.saveValue(key, value) + + if key not in config.keys_restart_need: + if value is None: # Default value + setattr(config, key, config.parser.get_default(key)) + setattr(config.arguments, key, config.parser.get_default(key)) + else: + setattr(config, key, value) + setattr(config.arguments, key, value) + else: + config.need_restart = True + config.pending_changes[key] = value + + if key == "language": + import Translate + for translate in 
Translate.translates: + translate.setLanguage(value) + message = _["You have successfully changed the web interface's language!"] + "<br>" + message += _["Due to the browser's caching, the full transformation could take some minute."] + self.cmd("notification", ["done", message, 10000]) +
+ if key == "tor_use_bridges": + if value is None: + value = False + else: + value = True + tor_manager = main.file_server.tor_manager + tor_manager.request("SETCONF UseBridges=%i" % value) + + if key == "trackers_file": + config.loadTrackersFile() + + if key == "log_level": + logging.getLogger('').setLevel(logging.getLevelName(config.log_level)) + + if key == "ip_external": + gevent.spawn(main.file_server.portCheck) + + if key == "offline": + if value: + main.file_server.closeConnections() + else: + gevent.spawn(main.file_server.checkSites, check_files=False, force_port_check=True) + + self.response(to, "ok")
diff --git a/src/Ui/__init__.py b/src/Ui/__init__.py new file mode 100644 index 00000000..dcb8896d --- /dev/null +++ b/src/Ui/__init__.py @@ -0,0 +1,3 @@ +from .UiServer import UiServer +from .UiRequest import UiRequest +from .UiWebsocket import UiWebsocket \ No newline at end of file
diff --git a/src/Ui/media/Fixbutton.coffee b/src/Ui/media/Fixbutton.coffee new file mode 100644 index 00000000..954d2b56 --- /dev/null +++ b/src/Ui/media/Fixbutton.coffee @@ -0,0 +1,32 @@ +class Fixbutton + constructor: -> + @dragging = false + $(".fixbutton-bg").on "mouseover", -> + $(".fixbutton-bg").stop().animate({"scale": 0.7}, 800, "easeOutElastic") + $(".fixbutton-burger").stop().animate({"opacity": 1.5, "left": 0}, 800, "easeOutElastic") + $(".fixbutton-text").stop().animate({"opacity": 0, "left": 20}, 300, "easeOutCubic") + + $(".fixbutton-bg").on "mouseout", -> + if $(".fixbutton").hasClass("dragging") + return true + $(".fixbutton-bg").stop().animate({"scale": 0.6}, 300, "easeOutCubic") + $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic") + $(".fixbutton-text").stop().animate({"opacity": 0.9, "left": 0}, 300, "easeOutBack") + + + ###$(".fixbutton-bg").on "click", -> + return false + ### + + $(".fixbutton-bg").on "mousedown", -> + # $(".fixbutton-burger").stop().animate({"scale": 0.7, "left": 0}, 300, "easeOutCubic") + #$("#inner-iframe").toggleClass("back") + #$(".wrapper-iframe").stop().animate({"scale": 0.9}, 600, "easeOutCubic") + #$("body").addClass("back") + + $(".fixbutton-bg").on "mouseup", -> + # $(".fixbutton-burger").stop().animate({"scale": 1, "left": 0}, 600, "easeOutElastic") + + + +window.Fixbutton = Fixbutton
diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee new file mode 100644 index 00000000..3a490364 --- /dev/null +++ b/src/Ui/media/Infopanel.coffee @@ -0,0 +1,57 @@ +class Infopanel + constructor: (@elem) -> + @visible = false + + show: (closed=false) => + @elem.parent().addClass("visible") + if closed + @close() + else + @open() + + unfold: => + @elem.toggleClass("unfolded") + return false + + updateEvents: => + @elem.off("click") + @elem.find(".close").off("click") + @elem.find(".line").off("click") + + @elem.find(".line").on("click", @unfold) + + if @elem.hasClass("closed") + @elem.on "click", => + @onOpened() + @open() + else + @elem.find(".close").on "click", => + @onClosed() + @close() + + hide: => + @elem.parent().removeClass("visible") + + close: => + @elem.addClass("closed") + @updateEvents() + return false + + open: => + @elem.removeClass("closed") + @updateEvents() + return false + + setTitle: (line1, line2) => + @elem.find(".line-1").text(line1) + @elem.find(".line-2").text(line2) + + setClosedNum: (num) => + @elem.find(".closed-num").text(num) +
setAction: (title, func) => + @elem.find(".button").text(title).off("click").on("click", func) + + + +window.Infopanel = Infopanel
diff --git a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee new file mode 100644 index 00000000..8e35ce66 --- /dev/null +++ b/src/Ui/media/Loading.coffee @@ -0,0 +1,91 @@ +class Loading + constructor: (@wrapper) -> + if window.show_loadingscreen then @showScreen() + @timer_hide = null + @timer_set = null + + setProgress: (percent) -> + if @timer_hide + clearInterval @timer_hide + @timer_set = RateLimit 500, -> + $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block") + + hideProgress: -> + @log "hideProgress" + if @timer_set + clearInterval @timer_set + @timer_hide = setTimeout ( => + $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000) + ), 300 + + + showScreen: -> + $(".loadingscreen").css("display", "block").addClassLater("ready") + @screen_visible = true + @printLine "&nbsp;&nbsp;&nbsp;Connecting..." + + + showTooLarge: (site_info) -> + @log "Displaying large site confirmation" + if $(".console .button-setlimit").length == 0 # Not displaying it yet + line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning") + button = $("<a href='#Set+limit' class='button button-setlimit'>" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "</a>") + button.on "click", => + button.addClass("loading") + return @wrapper.setSizeLimit(site_info.next_size_limit) + line.after(button) + setTimeout (=> + @printLine('Ready.') + ), 100 + + showTrackerTorBridge: (server_info) -> + if $(".console .button-settrackerbridge").length == 0 and not server_info.tor_use_meek_bridges + line = @printLine("Tracker connection error detected.", "error") + button = $("<a href='#Use+Tor+meek+bridges' class='button button-settrackerbridge'>" + "Use Tor meek bridges for tracker connections" + "</a>") + button.on "click", => + button.addClass("loading") + @wrapper.ws.cmd "configSet", ["tor_use_bridges", ""] + @wrapper.ws.cmd "configSet", ["trackers_proxy", "tor"] + @wrapper.ws.cmd "siteUpdate", {address: @wrapper.site_info.address, announce: true} + @wrapper.reloadIframe() + return false + line.after(button) + if not server_info.tor_has_meek_bridges + button.addClass("disabled") + @printLine("No meek bridge support in your client, please download the latest bundle.", "warning") + + # We don't need the loadingscreen anymore + hideScreen: -> + @log "hideScreen" + if not $(".loadingscreen").hasClass("done") # Only if it's not animating already + if @screen_visible # Hide with animate + $(".loadingscreen").addClass("done").removeLater(2000) + else # Not visible, just remove + $(".loadingscreen").remove() + @screen_visible = false + + + # Append text to the last line of the loadingscreen + print: (text, type="normal") -> + if not @screen_visible then return false + $(".loadingscreen .console .cursor").remove() # Remove previous cursor + last_line = $(".loadingscreen .console .console-line:last-child") + if type == "error" then text = "<span class='console-error'>#{text}</span>" + last_line.html(last_line.html()+text) + + + # Add a line to the loading screen + printLine: (text, type="normal") -> + if not @screen_visible then return false + $(".loadingscreen .console .cursor").remove() # Remove previous cursor + if type == "error" then text = "<span class='console-error'>#{text}</span>" else text = text+"<span class='cursor'> </span>" + + line = $("<div class='console-line'>#{text}</div>").appendTo(".loadingscreen .console") + if type == "warning" then line.addClass("console-warning") + return line + + log: (args...) -> + console.log "[Loading]", args... + + +window.Loading = Loading
diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee new file mode 100644 index 00000000..8898b645 --- /dev/null +++ b/src/Ui/media/Notifications.coffee @@ -0,0 +1,89 @@ +class Notifications + constructor: (@elem) -> + @ + + test: -> + setTimeout (=> + @add("connection", "error", "Connection lost to UiServer on localhost!") + @add("message-Anyone", "info", "New from Anyone.") + ), 1000 + setTimeout (=> + @add("connection", "done", "UiServer connection recovered.", 5000) + ), 3000 + + + add: (id, type, body, timeout=0) -> + id = id.replace /[^A-Za-z0-9-]/g, "" + # Close notifications with same id + for elem in $(".notification-#{id}") + @close $(elem) + + # Create element + elem = $(".notification.template", @elem).clone().removeClass("template") + elem.addClass("notification-#{type}").addClass("notification-#{id}") + if type == "progress" + elem.addClass("notification-done") + + # Update text + if type == "error" + $(".notification-icon", elem).html("!") + else if type == "done" + $(".notification-icon", elem).html("<div class='icon-success'></div>") + else if type == "progress" + $(".notification-icon", elem).html("<div class='icon-success'></div>") + else if type == "ask" + $(".notification-icon", elem).html("?") + else + $(".notification-icon", elem).html("i") + + if typeof(body) == "string" + $(".body", elem).html("<div class='message'><span class='multiline'>"+body+"</span></div>") + else + $(".body", elem).html("").append(body) + + elem.appendTo(@elem) + + # Timeout + if timeout + $(".close", elem).remove() # No need of close button + setTimeout (=> + @close elem + ), timeout + + # Animate + width = Math.min(elem.outerWidth() + 50, 580) + if not timeout then width += 20 # Add space for close button + if elem.outerHeight() > 55 then elem.addClass("long") + elem.css({"width": "50px", "transform": "scale(0.01)"}) + elem.animate({"scale": 1}, 800, "easeOutElastic") + elem.animate({"width": width}, 700, "easeInOutCubic") + $(".body", elem).css("width": (width - 50)) + $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000) + + # Close button or Confirm button + $(".close, .button", elem).on "click", => + @close elem + return false + + # Select list + $(".select", elem).on "click", => + @close elem + + # Input enter + $("input", elem).on "keyup", (e) => + if e.keyCode == 13 + @close elem + + return elem + + + close: (elem) -> + elem.stop().animate {"width": 0, "opacity": 0}, 700, "easeInOutCubic" + elem.slideUp 300, (-> elem.remove()) + + + log: (args...) -> + console.log "[Notifications]", args... + + +window.Notifications = Notifications
diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee new file mode 100644 index 00000000..1b98855e --- /dev/null +++ b/src/Ui/media/Wrapper.coffee @@ -0,0 +1,714 @@ +class Wrapper + constructor: (ws_url) -> + @log "Created!" + + @loading = new Loading(@) + @notifications = new Notifications($(".notifications")) + @infopanel = new Infopanel($(".infopanel")) + @infopanel.onClosed = => + @ws.cmd("siteSetSettingsValue", ["modified_files_notification", false]) + @infopanel.onOpened = => + @ws.cmd("siteSetSettingsValue", ["modified_files_notification", true]) + @fixbutton = new Fixbutton() + + window.addEventListener("message", @onMessageInner, false) + @inner = document.getElementById("inner-iframe").contentWindow + @ws = new ZeroWebsocket(ws_url) + @ws.next_message_id = 1000000 # Avoid messageid collision :) + @ws.onOpen = @onOpenWebsocket + @ws.onClose = @onCloseWebsocket + @ws.onMessage = @onMessageWebsocket + @ws.connect() + @ws_error = null # Ws error message + + @next_cmd_message_id = -1 + + @site_info = null # Hold latest site info + @server_info = null # Hold latest server info + @event_site_info = $.Deferred() # Event when site_info received + @inner_loaded = false # If iframe loaded or not + @inner_ready = false # Inner frame ready to receive messages + @wrapperWsInited = false # Wrapper notified on websocket open + @site_error = null # Latest failed file download + @address = null + @opener_tested = false + @announcer_line = null + @web_notifications = {} + @is_title_changed = false + + @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors + + window.onload = @onPageLoad # On iframe loaded + window.onhashchange = (e) => # On hash change + @log "Hashchange", window.location.hash + if window.location.hash + src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash + $("#inner-iframe").attr("src", src) + + window.onpopstate = (e) => + @sendInner {"cmd": "wrapperPopState", "params": {"href": document.location.href, "state": e.state}} + + $("#inner-iframe").focus() + + + verifyEvent: (allowed_target, e) => + if not e.originalEvent.isTrusted + throw "Event not trusted" + + if e.originalEvent.constructor not in @allowed_event_constructors + throw "Invalid event constructor: #{e.constructor} not in #{JSON.stringify(@allowed_event_constructors)}" + + if e.originalEvent.currentTarget != allowed_target[0] + throw "Invalid event target: #{e.originalEvent.currentTarget} != #{allowed_target[0]}" + + # Incoming message from UiServer websocket + onMessageWebsocket: (e) => + message = JSON.parse(e.data) + @handleMessageWebsocket(message) + + handleMessageWebsocket: (message) => + cmd = message.cmd + if cmd == "response" + if @ws.waiting_cb[message.to]? # We are waiting for response + @ws.waiting_cb[message.to](message.result) + else + @sendInner message # Pass message to inner frame + else if cmd == "notification" # Display notification + type = message.params[0] + id = "notification-ws-#{message.id}" + if "-" in message.params[0] # - in first param: message id defined + [id, type] = message.params[0].split("-") + @notifications.add(id, type, message.params[1], message.params[2]) + else if cmd == "progress" # Display notification + @actionProgress(message) + else if cmd == "prompt" # Prompt input + @displayPrompt message.params[0], message.params[1], message.params[2], message.params[3], (res) => + @ws.response message.id, res + else if cmd == "confirm" # Confirm action + @displayConfirm message.params[0], message.params[1], (res) => + @ws.response message.id, res + else if cmd == "setSiteInfo" + @sendInner message # Pass to inner frame + if message.params.address == @address # Current page + @setSiteInfo message.params + @updateProgress message.params + else if cmd == "setAnnouncerInfo" + @sendInner message # Pass to inner frame + if message.params.address == @address # Current page + @setAnnouncerInfo message.params + @updateProgress message.params + else if cmd == "error" + @notifications.add("notification-#{message.id}", "error", message.params, 0) + else if cmd == "updating" # Close connection + @log "Updating: Closing websocket" + @ws.ws.close() + @ws.onCloseWebsocket(null, 4000) + else if cmd == "redirect" + window.top.location = message.params + else if cmd == "injectHtml" + $("body").append(message.params) + else if cmd == "injectScript" + script_tag = $("<script>")

[Extraction gap: the remainder of src/Ui/media/Wrapper.coffee and the hunks for Wrapper.css, WrapperZeroFrame.coffee, ZeroSiteTheme.coffee, all.css, all.js, the img/ and lib/ assets, src/Ui/template/site_add.html and the markup of src/Ui/template/wrapper.html were destroyed when the page was flattened to text. Only wrapper.html's visible strings survive: the noscript warning "ZeroNet requires JavaScript support." with the hint "If you use NoScript/Tor browser: Click on toolbar icon with the notification and choose "Temp. TRUSTED" for 127.0.0.1.", the loading screen and progress bar, the notification template ("! Test notification ×"), the infopanel template ("8 modified files", "content.json, data.json", "Sign & Publish", "×") and the "Config" button.]
+ + + + + + + + + + + diff --git a/src/User/User.py b/src/User/User.py new file mode 100644 index 00000000..dbcfc56f --- /dev/null +++ b/src/User/User.py @@ -0,0 +1,176 @@ +import logging +import json +import time +import binascii + +import gevent + +import util +from Crypt import CryptBitcoin +from Plugin import PluginManager +from Config import config +from util import helper +from Debug import Debug + + +@PluginManager.acceptPlugins +class User(object): + def __init__(self, master_address=None, master_seed=None, data={}): + if master_seed: + self.master_seed = master_seed + self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed) + elif master_address: + self.master_address = master_address + self.master_seed = data.get("master_seed") + else: + self.master_seed = CryptBitcoin.newSeed() + self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed) + self.sites = data.get("sites", {}) + self.certs = data.get("certs", {}) + self.settings = data.get("settings", {}) + self.delayed_save_thread = None + + self.log = logging.getLogger("User:%s" % self.master_address) + + # Save to data/users.json + @util.Noparallel(queue=True, ignore_class=True) + def save(self): + s = time.time() + users = json.load(open("%s/users.json" % config.data_dir)) + if self.master_address not in users: + users[self.master_address] = {} # Create if not exist + user_data = users[self.master_address] + if self.master_seed: + user_data["master_seed"] = self.master_seed + user_data["sites"] = self.sites + user_data["certs"] = self.certs + user_data["settings"] = self.settings + helper.atomicWrite("%s/users.json" % config.data_dir, helper.jsonDumps(users).encode("utf8")) + self.log.debug("Saved in %.3fs" % (time.time() - s)) + self.delayed_save_thread = None + + def saveDelayed(self): + if not self.delayed_save_thread: + self.delayed_save_thread = gevent.spawn_later(5, self.save) + + def getAddressAuthIndex(self, address): + return int(binascii.hexlify(address.encode()), 16) + + @util.Noparallel() + def generateAuthAddress(self, address): + s = time.time() + address_id = self.getAddressAuthIndex(address) # Convert site address to int + auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) + self.sites[address] = { + "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), + "auth_privatekey": auth_privatekey + } + self.saveDelayed() + self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) + return self.sites[address] + + # Get user site data + # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"} + def getSiteData(self, address, create=True): + if address not in self.sites: # Generate new BIP32 child key based on site address + if not create: + return {"auth_address": None, "auth_privatekey": None} # Dont create user yet + self.generateAuthAddress(address) + return self.sites[address] + + def deleteSiteData(self, address): + if address in self.sites: + del(self.sites[address]) + self.saveDelayed() + self.log.debug("Deleted site: %s" % address) + + def setSiteSettings(self, address, settings): + site_data = self.getSiteData(address) + site_data["settings"] = settings + self.saveDelayed() + return site_data + + # Get data for a new, unique site + # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}] + def getNewSiteData(self): + import random + bip32_index = random.randrange(2 ** 256) % 100000000 + site_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, bip32_index) + site_address 
= CryptBitcoin.privatekeyToAddress(site_privatekey) + if site_address in self.sites: + raise Exception("Random error: site exist!") + # Save to sites + self.getSiteData(site_address) + self.sites[site_address]["privatekey"] = site_privatekey + self.save() + return site_address, bip32_index, self.sites[site_address] + + # Get BIP32 address from site address + # Return: BIP32 auth address + def getAuthAddress(self, address, create=True): + cert = self.getCert(address) + if cert: + return cert["auth_address"] + else: + return self.getSiteData(address, create)["auth_address"] + + def getAuthPrivatekey(self, address, create=True): + cert = self.getCert(address) + if cert: + return cert["auth_privatekey"] + else: + return self.getSiteData(address, create)["auth_privatekey"] + + # Add cert for the user + def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign): + # Find privatekey by auth address + auth_privatekey = [site["auth_privatekey"] for site in list(self.sites.values()) if site["auth_address"] == auth_address][0] + cert_node = { + "auth_address": auth_address, + "auth_privatekey": auth_privatekey, + "auth_type": auth_type, + "auth_user_name": auth_user_name, + "cert_sign": cert_sign + } + # Check if we have already cert for that domain and its not the same + if self.certs.get(domain) and self.certs[domain] != cert_node: + return False + elif self.certs.get(domain) == cert_node: # Same, not updated + return None + else: # Not exist yet, add + self.certs[domain] = cert_node + self.save() + return True + + # Remove cert from user + def deleteCert(self, domain): + del self.certs[domain] + + # Set active cert for a site + def setCert(self, address, domain): + site_data = self.getSiteData(address) + if domain: + site_data["cert"] = domain + else: + if "cert" in site_data: + del site_data["cert"] + self.saveDelayed() + return site_data + + # Get cert for the site address + # Return: { "auth_address":.., "auth_privatekey":.., "auth_type": "web", "auth_user_name": "nofish", "cert_sign":.. 
} or None + def getCert(self, address): + site_data = self.getSiteData(address, create=False) + if not site_data or "cert" not in site_data: + return None # Site doesn't have a cert + return self.certs.get(site_data["cert"]) +
+ # Get the cert user name for the site address + # Return: user@certprovider.bit or None + def getCertUserId(self, address): + site_data = self.getSiteData(address, create=False) + if not site_data or "cert" not in site_data: + return None # Site doesn't have a cert + cert = self.certs.get(site_data["cert"]) + if cert: + return cert["auth_user_name"] + "@" + site_data["cert"]
diff --git a/src/User/UserManager.py b/src/User/UserManager.py new file mode 100644 index 00000000..067734a6 --- /dev/null +++ b/src/User/UserManager.py @@ -0,0 +1,77 @@ +# Included modules +import json +import logging +import time + +# ZeroNet Modules +from .User import User +from Plugin import PluginManager +from Config import config + + +@PluginManager.acceptPlugins +class UserManager(object): + def __init__(self): + self.users = {} + self.log = logging.getLogger("UserManager") +
+ # Load all users from data/users.json + def load(self): + if not self.users: + self.users = {} + + user_found = [] + added = 0 + s = time.time() + # Load new users + try: + json_path = "%s/users.json" % config.data_dir + data = json.load(open(json_path)) + except Exception as err: + raise Exception("Unable to load %s: %s" % (json_path, err)) + + for master_address, data in list(data.items()): + if master_address not in self.users: + user = User(master_address, data=data) + self.users[master_address] = user + added += 1 + user_found.append(master_address) + + # Remove deleted addresses + for master_address in list(self.users.keys()): + if master_address not in user_found: + del(self.users[master_address]) + self.log.debug("Removed user: %s" % master_address) + + if added: + self.log.debug("Added %s users in %.3fs" % (added, time.time() - s)) +
+ # Create a new user + # Return: User + def create(self, master_address=None, master_seed=None): + self.list() # Load the users if they are not loaded yet + user = User(master_address, master_seed) + self.log.debug("Created user: %s" % user.master_address) + if user.master_address: # If successfully created + self.users[user.master_address] = user + user.saveDelayed() + return user +
+ # List all users from data/users.json + # Return: {"usermasteraddr": User} + def list(self): + if self.users == {}: # Not loaded yet + self.load() + return self.users +
+ # Get user based on master_address + # Return: User or None + def get(self, master_address=None): + users = self.list() + if users: + return list(users.values())[0] # Single user mode, always return the first + else: + return None + + +user_manager = UserManager() # Singleton
diff --git a/src/User/__init__.py b/src/User/__init__.py new file mode 100644 index 00000000..4db9149e --- /dev/null +++ b/src/User/__init__.py @@ -0,0 +1 @@ +from .User import User
diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py new file mode 100644 index 00000000..b7111ba1 --- /dev/null +++ b/src/Worker/Worker.py @@ -0,0 +1,239 @@ +import time + +import gevent +import gevent.lock + +from Debug import Debug +from Config import config +from Content.ContentManager import VerifyError + + +class WorkerDownloadError(Exception): + pass + + +class WorkerIOError(Exception): + pass + + +class WorkerStop(Exception): + pass + + +class Worker(object): + + def __init__(self, manager, peer): + self.manager = manager + self.peer = peer + self.task = None + self.key = None +
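# Illustrative note (assumption, not from the original file): judging from the
# WorkerManager.workers comment ("Key: ip:port, Value: Worker.Worker"), `key`
# is filled in by the manager when the worker is registered, roughly:
#
#     worker = Worker(manager, peer)
#     worker.key = peer.key                 # hypothetical, e.g. "188.40.1.1:15441"
#     manager.workers[worker.key] = worker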
self.running = False + self.thread = None + self.num_downloaded = 0 + self.num_failed = 0 + + def __str__(self): + return "Worker %s %s" % (self.manager.site.address_short, self.key) + + def __repr__(self): + return "<%s>" % self.__str__() + + def waitForTask(self, task, timeout): # Wait for other workers to finish the task + for sleep_i in range(1, timeout * 10): + time.sleep(0.1) + if task["done"] or task["workers_num"] == 0: + if config.verbose: + self.manager.log.debug("%s: %s, picked task free after %ss sleep. (done: %s)" % ( + self.key, task["inner_path"], 0.1 * sleep_i, task["done"] + )) + break + + if sleep_i % 10 == 0: + workers = self.manager.findWorkers(task) + if not workers or not workers[0].peer.connection: + break + worker_idle = time.time() - workers[0].peer.connection.last_recv_time + if worker_idle > 1: + if config.verbose: + self.manager.log.debug("%s: %s, worker %s seems idle, picked up task after %ss sleep. (done: %s)" % ( + self.key, task["inner_path"], workers[0].key, 0.1 * sleep_i, task["done"] + )) + break + return True + + def pickTask(self): # Find and select a new task for the worker + task = self.manager.getTask(self.peer) + if not task: # No more task + time.sleep(0.1) # Wait a bit for new tasks + task = self.manager.getTask(self.peer) + if not task: # Still no task, stop it + stats = "downloaded files: %s, failed: %s" % (self.num_downloaded, self.num_failed) + self.manager.log.debug("%s: No task found, stopping (%s)" % (self.key, stats)) + return False + + if not task["time_started"]: + task["time_started"] = time.time() # Task started now + + if task["workers_num"] > 0: # Wait a bit if someone already working on it + if task["peers"]: # It's an update + timeout = 3 + else: + timeout = 1 + + if task["size"] > 100 * 1024 * 1024: + timeout = timeout * 2 + + if config.verbose: + self.manager.log.debug("%s: Someone already working on %s (pri: %s), sleeping %s sec..." 
% ( + self.key, task["inner_path"], task["priority"], timeout + )) + + self.waitForTask(task, timeout) + return task + + def downloadTask(self, task): + try: + buff = self.peer.getFile(task["site"].address, task["inner_path"], task["size"]) + except Exception as err: + self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) + raise WorkerDownloadError(str(err)) + + if not buff: + raise WorkerDownloadError("No response") + + return buff + + def getTaskLock(self, task): + if task["lock"] is None: + task["lock"] = gevent.lock.Semaphore() + return task["lock"] + + def writeTask(self, task, buff): + buff.seek(0) + try: + task["site"].storage.write(task["inner_path"], buff) + except Exception as err: + if type(err) == Debug.Notify: + self.manager.log.debug("%s: Write aborted: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err)) + else: + self.manager.log.error("%s: Error writing: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err)) + raise WorkerIOError(str(err)) + + def onTaskVerifyFail(self, task, error_message): + self.num_failed += 1 + if self.manager.started_task_num < 50 or config.verbose: + self.manager.log.debug( + "%s: Verify failed: %s, error: %s, failed peers: %s, workers: %s" % + (self.key, task["inner_path"], error_message, len(task["failed"]), task["workers_num"]) + ) + task["failed"].append(self.peer) + self.peer.hash_failed += 1 + if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: + # Broken peer: More fails than tasks number but atleast 3 + raise WorkerStop( + "Too many errors (hash failed: %s, connection error: %s)" % + (self.peer.hash_failed, self.peer.connection_error) + ) + + def handleTask(self, task): + download_err = write_err = False + + write_lock = None + try: + buff = self.downloadTask(task) + + if task["done"] is True: # Task done, try to find new one + return None + + if self.running is False: # Worker no longer needed or got killed + self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) + raise WorkerStop("Running got disabled") + + write_lock = self.getTaskLock(task) + write_lock.acquire() + if task["site"].content_manager.verifyFile(task["inner_path"], buff) is None: + is_same = True + else: + is_same = False + is_valid = True + except (WorkerDownloadError, VerifyError) as err: + download_err = err + is_valid = False + is_same = False + + if is_valid and not is_same: + if self.manager.started_task_num < 50 or task["priority"] > 10 or config.verbose: + self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"])) + try: + self.writeTask(task, buff) + except WorkerIOError as err: + write_err = err + + if not task["done"]: + if write_err: + self.manager.failTask(task, reason="Write error") + self.num_failed += 1 + self.manager.log.error("%s: Error writing %s: %s" % (self.key, task["inner_path"], write_err)) + elif is_valid: + self.manager.doneTask(task) + self.num_downloaded += 1 + + if write_lock is not None and write_lock.locked(): + write_lock.release() + + if not is_valid: + self.onTaskVerifyFail(task, download_err) + time.sleep(1) + return False + + return True + + def downloader(self): + self.peer.hash_failed = 0 # Reset hash error counter + while self.running: + # Try to pickup free file download task + task = self.pickTask() + + if not task: + break + + if task["done"]: + continue + + self.task = task + + self.manager.addTaskWorker(task, self) + + try: + success = self.handleTask(task) + except WorkerStop as err: + 
self.manager.log.debug("%s: Worker stopped: %s" % (self.key, err)) + self.manager.removeTaskWorker(task, self) + break + + self.manager.removeTaskWorker(task, self) + + self.peer.onWorkerDone() + self.running = False + self.manager.removeWorker(self) + + # Start the worker + def start(self): + self.running = True + self.thread = gevent.spawn(self.downloader) + + # Skip current task + def skip(self, reason="Unknown"): + self.manager.log.debug("%s: Force skipping (reason: %s)" % (self.key, reason)) + if self.thread: + self.thread.kill(exception=Debug.createNotifyType("Worker skipping (reason: %s)" % reason)) + self.start() + + # Force stop the worker + def stop(self, reason="Unknown"): + self.manager.log.debug("%s: Force stopping (reason: %s)" % (self.key, reason)) + self.running = False + if self.thread: + self.thread.kill(exception=Debug.createNotifyType("Worker stopped (reason: %s)" % reason)) + del self.thread + self.manager.removeWorker(self) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py new file mode 100644 index 00000000..f68e8410 --- /dev/null +++ b/src/Worker/WorkerManager.py @@ -0,0 +1,600 @@ +import time +import logging +import collections + +import gevent + +from .Worker import Worker +from .WorkerTaskManager import WorkerTaskManager +from Config import config +from util import helper +from Plugin import PluginManager +from Debug.DebugLock import DebugLock +import util + + +@PluginManager.acceptPlugins +class WorkerManager(object): + + def __init__(self, site): + self.site = site + self.workers = {} # Key: ip:port, Value: Worker.Worker + self.tasks = WorkerTaskManager() + self.next_task_id = 1 + self.lock_add_task = DebugLock(name="Lock AddTask:%s" % self.site.address_short) + # {"id": 1, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": None, + # "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids, "lock": None or gevent.lock.RLock} + self.started_task_num = 0 # Last added task num + self.asked_peers = [] + self.running = True + self.time_task_added = 0 + self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) + self.site.greenlet_manager.spawn(self.checkTasks) + + def __str__(self): + return "WorkerManager %s" % self.site.address_short + + def __repr__(self): + return "<%s>" % self.__str__() + + # Check expired tasks + def checkTasks(self): + while self.running: + tasks = task = worker = workers = None # Cleanup local variables + announced = False + time.sleep(15) # Check every 15 sec + + # Clean up workers + for worker in list(self.workers.values()): + if worker.task and worker.task["done"]: + worker.skip(reason="Task done") # Stop workers with task done + + if not self.tasks: + continue + + tasks = self.tasks[:] # Copy it so removing elements wont cause any problem + num_tasks_started = len([task for task in tasks if task["time_started"]]) + + self.log.debug( + "Tasks: %s, started: %s, bad files: %s, total started: %s" % + (len(tasks), num_tasks_started, len(self.site.bad_files), self.started_task_num) + ) + + for task in tasks: + if task["time_started"] and time.time() >= task["time_started"] + 60: + self.log.debug("Timeout, Skipping: %s" % task) # Task taking too long time, skip it + # Skip to next file workers + workers = self.findWorkers(task) + if workers: + for worker in workers: + worker.skip(reason="Task timeout") + else: + self.failTask(task, reason="No workers") + + elif time.time() >= task["time_added"] + 60 and not 
self.workers: # No workers left + self.failTask(task, reason="Timeout") + + elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers: + # Find more workers: Task started more than 15 sec ago or no workers + workers = self.findWorkers(task) + self.log.debug( + "Slow task: %s, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" % + ( + task["inner_path"], len(workers), task["optional_hash_id"], + len(task["peers"] or []), len(task["failed"]), len(self.asked_peers) + ) + ) + if not announced and task["site"].isAddedRecently(): + task["site"].announce(mode="more") # Find more peers + announced = True + if task["optional_hash_id"]: + if self.workers: + if not task["time_started"]: + ask_limit = 20 + else: + ask_limit = max(10, time.time() - task["time_started"]) + if len(self.asked_peers) < ask_limit and len(task["peers"] or []) <= len(task["failed"]) * 2: + # Re-search for high priority + self.startFindOptional(find_more=True) + if task["peers"]: + peers_try = [peer for peer in task["peers"] if peer not in task["failed"] and peer not in workers] + if peers_try: + self.startWorkers(peers_try, force_num=5, reason="Task checker (optional, has peers)") + else: + self.startFindOptional(find_more=True) + else: + self.startFindOptional(find_more=True) + else: + if task["peers"]: # Release the peer lock + self.log.debug("Task peer lock release: %s" % task["inner_path"]) + task["peers"] = [] + self.startWorkers(reason="Task checker") + + if len(self.tasks) > len(self.workers) * 2 and len(self.workers) < self.getMaxWorkers(): + self.startWorkers(reason="Task checker (need more workers)") + + self.log.debug("checkTasks stopped running") + + # Returns the next free or less worked task + def getTask(self, peer): + for task in self.tasks: # Find a task + if task["peers"] and peer not in task["peers"]: + continue # This peer not allowed to pick this task + if peer in task["failed"]: + continue # Peer already tried to solve this, but failed + if task["optional_hash_id"] and task["peers"] is None: + continue # No peers found yet for the optional task + if task["done"]: + continue + return task + + def removeSolvedFileTasks(self, mark_as_good=True): + for task in self.tasks[:]: + if task["inner_path"] not in self.site.bad_files: + self.log.debug("No longer in bad_files, marking as %s: %s" % (mark_as_good, task["inner_path"])) + task["done"] = True + task["evt"].set(mark_as_good) + self.tasks.remove(task) + if not self.tasks: + self.started_task_num = 0 + self.site.updateWebsocket() + + # New peers added to site + def onPeers(self): + self.startWorkers(reason="More peers found") + + def getMaxWorkers(self): + if len(self.tasks) > 50: + return config.workers * 3 + else: + return config.workers + + # Add new worker + def addWorker(self, peer, multiplexing=False, force=False): + key = peer.key + if len(self.workers) > self.getMaxWorkers() and not force: + return False + if multiplexing: # Add even if we already have worker for this peer + key = "%s/%s" % (key, len(self.workers)) + if key not in self.workers: + # We dont have worker for that peer and workers num less than max + task = self.getTask(peer) + if task: + worker = Worker(self, peer) + self.workers[key] = worker + worker.key = key + worker.start() + return worker + else: + return False + else: # We have worker for this peer or its over the limit + return False + + def taskAddPeer(self, task, peer): + if task["peers"] is None: + task["peers"] = [] + if peer in task["failed"]: + return False + + if peer 
not in task["peers"]: + task["peers"].append(peer) + return True + + # Start workers to process tasks + def startWorkers(self, peers=None, force_num=0, reason="Unknown"): + if not self.tasks: + return False # No task for workers + max_workers = min(self.getMaxWorkers(), len(self.site.peers)) + if len(self.workers) >= max_workers and not peers: + return False # Workers number already maxed and no starting peers defined + self.log.debug( + "Starting workers (%s), tasks: %s, peers: %s, workers: %s" % + (reason, len(self.tasks), len(peers or []), len(self.workers)) + ) + if not peers: + peers = self.site.getConnectedPeers() + if len(peers) < max_workers: + peers += self.site.getRecentPeers(max_workers * 2) + if type(peers) is set: + peers = list(peers) + + # Sort by ping + peers.sort(key=lambda peer: peer.connection.last_ping_delay if peer.connection and peer.connection.last_ping_delay and len(peer.connection.waiting_requests) == 0 and peer.connection.connected else 9999) + + for peer in peers: # One worker for every peer + if peers and peer not in peers: + continue # If peers defined and peer not valid + + if force_num: + worker = self.addWorker(peer, force=True) + force_num -= 1 + else: + worker = self.addWorker(peer) + + if worker: + self.log.debug("Added worker: %s (rep: %s), workers: %s/%s" % (peer.key, peer.reputation, len(self.workers), max_workers)) + + # Find peers for optional hash in local hash tables and add to task peers + def findOptionalTasks(self, optional_tasks, reset_task=False): + found = collections.defaultdict(list) # { found_hash: [peer1, peer2...], ...} + + for peer in list(self.site.peers.values()): + if not peer.has_hashfield: + continue + + hashfield_set = set(peer.hashfield) # Finding in set is much faster + for task in optional_tasks: + optional_hash_id = task["optional_hash_id"] + if optional_hash_id in hashfield_set: + if reset_task and len(task["failed"]) > 0: + task["failed"] = [] + if peer in task["failed"]: + continue + if self.taskAddPeer(task, peer): + found[optional_hash_id].append(peer) + + return found + + # Find peers for optional hash ids in local hash tables + def findOptionalHashIds(self, optional_hash_ids, limit=0): + found = collections.defaultdict(list) # { found_hash_id: [peer1, peer2...], ...} + + for peer in list(self.site.peers.values()): + if not peer.has_hashfield: + continue + + hashfield_set = set(peer.hashfield) # Finding in set is much faster + for optional_hash_id in optional_hash_ids: + if optional_hash_id in hashfield_set: + found[optional_hash_id].append(peer) + if limit and len(found[optional_hash_id]) >= limit: + optional_hash_ids.remove(optional_hash_id) + + return found + + # Add peers to tasks from found result + def addOptionalPeers(self, found_ips): + found = collections.defaultdict(list) + for hash_id, peer_ips in found_ips.items(): + task = [task for task in self.tasks if task["optional_hash_id"] == hash_id] + if task: # Found task, lets take the first + task = task[0] + else: + continue + for peer_ip in peer_ips: + peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True, source="optional") + if not peer: + continue + if self.taskAddPeer(task, peer): + found[hash_id].append(peer) + if peer.hashfield.appendHashId(hash_id): # Peer has this file + peer.time_hashfield = None # Peer hashfield probably outdated + + return found + + # Start find peers for optional files + @util.Noparallel(blocking=False, ignore_args=True) + def startFindOptional(self, reset_task=False, find_more=False, high_priority=False): + # Wait for 
more file requests + if len(self.tasks) < 20 or high_priority: + time.sleep(0.01) + elif len(self.tasks) > 90: + time.sleep(5) + else: + time.sleep(0.5) + + optional_tasks = [task for task in self.tasks if task["optional_hash_id"]] + if not optional_tasks: + return False + optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks]) + time_tasks = self.time_task_added + + self.log.debug( + "Finding peers for optional files: %s (reset_task: %s, find_more: %s)" % + (optional_hash_ids, reset_task, find_more) + ) + found = self.findOptionalTasks(optional_tasks, reset_task=reset_task) + + if found: + found_peers = set([peer for peers in list(found.values()) for peer in peers]) + self.startWorkers(found_peers, force_num=3, reason="Optional found in local peers") + + if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.values())): + self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found))) + + # Query hashfield from connected peers + threads = [] + peers = self.site.getConnectedPeers() + if not peers: + peers = self.site.getConnectablePeers() + for peer in peers: + threads.append(self.site.greenlet_manager.spawn(peer.updateHashfield, force=find_more)) + gevent.joinall(threads, timeout=5) + + if time_tasks != self.time_task_added: # New task added since start + optional_tasks = [task for task in self.tasks if task["optional_hash_id"]] + optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks]) + + found = self.findOptionalTasks(optional_tasks) + self.log.debug("Found optional files after query hashtable connected peers: %s/%s" % ( + len(found), len(optional_hash_ids) + )) + + if found: + found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) + self.startWorkers(found_peers, force_num=3, reason="Optional found in connected peers") + + if len(found) < len(optional_hash_ids) or find_more: + self.log.debug( + "No connected hashtable result for optional files: %s (asked: %s)" % + (optional_hash_ids - set(found), len(self.asked_peers)) + ) + if not self.tasks: + self.log.debug("No tasks, stopping finding optional peers") + return + + # Try to query connected peers + threads = [] + peers = [peer for peer in self.site.getConnectedPeers() if peer.key not in self.asked_peers][0:10] + if not peers: + peers = self.site.getConnectablePeers(ignore=self.asked_peers) + + for peer in peers: + threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids))) + self.asked_peers.append(peer.key) + + for i in range(5): + time.sleep(1) + + thread_values = [thread.value for thread in threads if thread.value] + if not thread_values: + continue + + found_ips = helper.mergeDicts(thread_values) + found = self.addOptionalPeers(found_ips) + self.log.debug("Found optional files after findhash connected peers: %s/%s (asked: %s)" % ( + len(found), len(optional_hash_ids), len(threads) + )) + + if found: + found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) + self.startWorkers(found_peers, force_num=3, reason="Optional found by findhash connected peers") + + if len(thread_values) == len(threads): + # Got result from all started thread + break + + if len(found) < len(optional_hash_ids): + self.log.debug( + "No findHash result, try random peers: %s (asked: %s)" % + (optional_hash_ids - set(found), len(self.asked_peers)) + ) + # Try to query random peers + + if time_tasks != self.time_task_added: # 
New task added since start
+                        optional_tasks = [task for task in self.tasks if task["optional_hash_id"]]
+                        optional_hash_ids = set([task["optional_hash_id"] for task in optional_tasks])
+
+                    threads = []
+                    peers = self.site.getConnectablePeers(ignore=self.asked_peers)
+
+                    for peer in peers:
+                        threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids)))
+                        self.asked_peers.append(peer.key)
+
+                    gevent.joinall(threads, timeout=15)
+
+                    found_ips = helper.mergeDicts([thread.value for thread in threads if thread.value])
+                    found = self.addOptionalPeers(found_ips)
+                    self.log.debug("Found optional files after findhash random peers: %s/%s" % (len(found), len(optional_hash_ids)))
+
+                    if found:
+                        found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers])
+                        self.startWorkers(found_peers, force_num=3, reason="Optional found using findhash random peers")
+
+        if len(found) < len(optional_hash_ids):
+            self.log.debug("No findhash result for optional files: %s" % (optional_hash_ids - set(found)))
+
+        if time_tasks != self.time_task_added:  # New task added since start
+            self.log.debug("New task since start, restarting...")
+            self.site.greenlet_manager.spawnLater(0.1, self.startFindOptional)
+        else:
+            self.log.debug("startFindOptional ended")
+
+    # Stop all workers
+    def stopWorkers(self):
+        num = 0
+        for worker in list(self.workers.values()):
+            worker.stop(reason="Stopping all workers")
+            num += 1
+        tasks = self.tasks[:]  # Copy
+        for task in tasks:  # Mark all current tasks as failed
+            self.failTask(task, reason="Stopping all workers")
+        return num
+
+    # Find workers by task
+    def findWorkers(self, task):
+        workers = []
+        for worker in list(self.workers.values()):
+            if worker.task == task:
+                workers.append(worker)
+        return workers
+
+    # Ends and removes a worker
+    def removeWorker(self, worker):
+        worker.running = False
+        if worker.key in self.workers:
+            del(self.workers[worker.key])
+            self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), self.getMaxWorkers()))
+        if len(self.workers) <= self.getMaxWorkers() / 3 and len(self.asked_peers) < 10:
+            optional_task = next((task for task in self.tasks if task["optional_hash_id"]), None)
+            if optional_task:
+                if len(self.workers) == 0:
+                    self.startFindOptional(find_more=True)
+                else:
+                    self.startFindOptional()
+            elif self.tasks and not self.workers and worker.task and len(worker.task["failed"]) < 20:
+                self.log.debug("Starting new workers...
(tasks: %s)" % len(self.tasks)) + self.startWorkers(reason="Removed worker") + + # Tasks sorted by this + def getPriorityBoost(self, inner_path): + if inner_path == "content.json": + return 9999 # Content.json always priority + if inner_path == "index.html": + return 9998 # index.html also important + if "-default" in inner_path: + return -4 # Default files are cloning not important + elif inner_path.endswith("all.css"): + return 14 # boost css files priority + elif inner_path.endswith("all.js"): + return 13 # boost js files priority + elif inner_path.endswith("dbschema.json"): + return 12 # boost database specification + elif inner_path.endswith("content.json"): + return 1 # boost included content.json files priority a bit + elif inner_path.endswith(".json"): + if len(inner_path) < 50: # Boost non-user json files + return 11 + else: + return 2 + return 0 + + def addTaskUpdate(self, task, peer, priority=0): + if priority > task["priority"]: + self.tasks.updateItem(task, "priority", priority) + if peer and task["peers"]: # This peer also has new version, add it to task possible peers + task["peers"].append(peer) + self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer], reason="Added new task (update received by peer)") + elif peer and peer in task["failed"]: + task["failed"].remove(peer) # New update arrived, remove the peer from failed peers + self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer], reason="Added new task (peer failed before)") + + def addTaskCreate(self, inner_path, peer, priority=0, file_info=None): + evt = gevent.event.AsyncResult() + if peer: + peers = [peer] # Only download from this peer + else: + peers = None + if not file_info: + file_info = self.site.content_manager.getFileInfo(inner_path) + if file_info and file_info["optional"]: + optional_hash_id = helper.toHashId(file_info["sha512"]) + else: + optional_hash_id = None + if file_info: + size = file_info.get("size", 0) + else: + size = 0 + + self.lock_add_task.acquire() + + # Check again if we have task for this file + task = self.tasks.findTask(inner_path) + if task: + self.addTaskUpdate(task, peer, priority) + return task + + priority += self.getPriorityBoost(inner_path) + + if self.started_task_num == 0: # Boost priority for first requested file + priority += 1 + + task = { + "id": self.next_task_id, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, + "optional_hash_id": optional_hash_id, "time_added": time.time(), "time_started": None, "lock": None, + "time_action": None, "peers": peers, "priority": priority, "failed": [], "size": size + } + + self.tasks.append(task) + self.lock_add_task.release() + + self.next_task_id += 1 + self.started_task_num += 1 + if config.verbose: + self.log.debug( + "New task: %s, peer lock: %s, priority: %s, optional_hash_id: %s, tasks started: %s" % + (task["inner_path"], peers, priority, optional_hash_id, self.started_task_num) + ) + + self.time_task_added = time.time() + + if optional_hash_id: + if self.asked_peers: + del self.asked_peers[:] # Reset asked peers + self.startFindOptional(high_priority=priority > 0) + + if peers: + self.startWorkers(peers, reason="Added new optional task") + + else: + self.startWorkers(peers, reason="Added new task") + return task + + # Create new task and return asyncresult + def addTask(self, inner_path, peer=None, priority=0, file_info=None): + self.site.onFileStart(inner_path) # First task, trigger site 
download started
+        task = self.tasks.findTask(inner_path)
+        if task:  # Already has a task for that file
+            self.addTaskUpdate(task, peer, priority)
+        else:  # No task for that file yet
+            task = self.addTaskCreate(inner_path, peer, priority, file_info)
+        return task
+
+    def addTaskWorker(self, task, worker):
+        try:
+            self.tasks.updateItem(task, "workers_num", task["workers_num"] + 1)
+        except ValueError:
+            task["workers_num"] += 1
+
+    def removeTaskWorker(self, task, worker):
+        try:
+            self.tasks.updateItem(task, "workers_num", task["workers_num"] - 1)
+        except ValueError:
+            task["workers_num"] -= 1
+        if len(task["failed"]) >= len(self.workers):
+            fail_reason = "Too many fails: %s (workers: %s)" % (len(task["failed"]), len(self.workers))
+            self.failTask(task, reason=fail_reason)
+
+    # Wait for other tasks
+    def checkComplete(self):
+        time.sleep(0.1)
+        if not self.tasks:
+            self.log.debug("Check complete: No tasks")
+            self.onComplete()
+
+    def onComplete(self):
+        self.started_task_num = 0
+        del self.asked_peers[:]
+        self.site.onComplete()  # No more tasks, trigger site complete
+
+    # Mark a task done
+    def doneTask(self, task):
+        task["done"] = True
+        self.tasks.remove(task)  # Remove from queue
+        if task["optional_hash_id"]:
+            self.log.debug(
+                "Downloaded optional file in %.3fs, adding to hashfield: %s" %
+                (time.time() - task["time_started"], task["inner_path"])
+            )
+            self.site.content_manager.optionalDownloaded(task["inner_path"], task["optional_hash_id"], task["size"])
+        self.site.onFileDone(task["inner_path"])
+        task["evt"].set(True)
+        if not self.tasks:
+            self.site.greenlet_manager.spawn(self.checkComplete)
+
+    # Mark a task failed
+    def failTask(self, task, reason="Unknown"):
+        try:
+            self.tasks.remove(task)  # Remove from queue
+        except ValueError:
+            return False
+
+        self.log.debug("Task %s failed (Reason: %s)" % (task["inner_path"], reason))
+        task["done"] = True
+        self.site.onFileFail(task["inner_path"])
+        task["evt"].set(False)
+        if not self.tasks:
+            self.site.greenlet_manager.spawn(self.checkComplete)
diff --git a/src/Worker/WorkerTaskManager.py b/src/Worker/WorkerTaskManager.py
new file mode 100644
index 00000000..9359701d
--- /dev/null
+++ b/src/Worker/WorkerTaskManager.py
@@ -0,0 +1,122 @@
+import bisect
+from collections.abc import MutableSequence
+
+
+class CustomSortedList(MutableSequence):
+    def __init__(self):
+        super().__init__()
+        self.items = []  # (priority, added index, actual value)
+        self.logging = False
+
+    def __repr__(self):
+        return "<{0} {1}>".format(self.__class__.__name__, self.items)
+
+    def __len__(self):
+        return len(self.items)
+
+    def __getitem__(self, index):
+        if type(index) is int:
+            return self.items[index][2]
+        else:
+            return [item[2] for item in self.items[index]]
+
+    def __delitem__(self, index):
+        del self.items[index]
+
+    def __setitem__(self, index, value):
+        self.items[index] = self.valueToItem(value)
+
+    def __str__(self):
+        return str(self[:])
+
+    def insert(self, index, value):
+        self.append(value)
+
+    def append(self, value):
+        bisect.insort(self.items, self.valueToItem(value))
+
+    def updateItem(self, value, update_key=None, update_value=None):
+        self.remove(value)
+        if update_key is not None:
+            value[update_key] = update_value
+        self.append(value)
+
+    def sort(self, *args, **kwargs):
+        raise Exception("Sorted list can't be sorted")
+
+    def valueToItem(self, value):
+        return (self.getPriority(value), self.getId(value), value)
+
+    def getPriority(self, value):
+        return value
+
+    def getId(self, value):
+        return id(value)
+
+    def indexSlow(self, value):
+        for pos, item in enumerate(self.items):
+            if item[2] == value:
+                return pos
+        return None
+
+    def index(self, value):
+        item = (self.getPriority(value), self.getId(value), value)
+        bisect_pos = bisect.bisect(self.items, item) - 1
+        if bisect_pos >= 0 and self.items[bisect_pos][2] == value:
+            return bisect_pos
+
+        # Item probably changed since it was added, switch to slow iteration
+        pos = self.indexSlow(value)
+
+        if self.logging:
+            print("Slow index for %s in pos %s bisect: %s" % (item[2], pos, bisect_pos))
+
+        if pos is None:
+            raise ValueError("%r not in list" % value)
+        else:
+            return pos
+
+    def __contains__(self, value):
+        try:
+            self.index(value)
+            return True
+        except ValueError:
+            return False
+
+
+class WorkerTaskManager(CustomSortedList):
+    def __init__(self):
+        super().__init__()
+        self.inner_paths = {}
+
+    def getPriority(self, value):
+        return 0 - (value["priority"] - value["workers_num"] * 10)
+
+    def getId(self, value):
+        return value["id"]
+
+    def __contains__(self, value):
+        return value["inner_path"] in self.inner_paths
+
+    def __delitem__(self, index):
+        # Remove from the inner path cache
+        del self.inner_paths[self.items[index][2]["inner_path"]]
+        super().__delitem__(index)
+
+    # Fast task search by inner_path
+    def append(self, task):
+        if task["inner_path"] in self.inner_paths:
+            raise ValueError("File %s already has a task" % task["inner_path"])
+        super().append(task)
+        # Create inner path cache for faster lookup by filename
+        self.inner_paths[task["inner_path"]] = task
+
+    def remove(self, task):
+        if task not in self:
+            raise ValueError("%r not in list" % task)
+        else:
+            super().remove(task)
+
+    def findTask(self, inner_path):
+        return self.inner_paths.get(inner_path, None)
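A quick sketch of how this queue orders tasks (task dicts trimmed to the only fields the sort reads): getPriority() negates the effective priority, so bisect keeps the highest-priority task at index 0, and every worker already assigned to a task subtracts 10 from its effective priority, letting idle tasks overtake busy ones:

    tasks = WorkerTaskManager()
    tasks.append({"id": 1, "inner_path": "data/users.json", "priority": 1, "workers_num": 0})
    tasks.append({"id": 2, "inner_path": "content.json", "priority": 9999, "workers_num": 0})
    assert tasks[0]["inner_path"] == "content.json"     # highest effective priority first
    tasks.updateItem(tasks[0], "workers_num", 1000)     # 1000 workers -> effective priority drops by 10000
    assert tasks[0]["inner_path"] == "data/users.json"  # the busy task drifted behind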
diff --git a/src/Worker/__init__.py b/src/Worker/__init__.py
new file mode 100644
index 00000000..f4d20a96
--- /dev/null
+++ b/src/Worker/__init__.py
@@ -0,0 +1,2 @@
+from .Worker import Worker
+from .WorkerManager import WorkerManager
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/loglevel_overrides.py b/src/loglevel_overrides.py
new file mode 100644
index 00000000..5622e523
--- /dev/null
+++ b/src/loglevel_overrides.py
@@ -0,0 +1,9 @@
+# This file is for adding rules for selectively enabling debug logging
+# when working on the code.
+# Add your rules here and skip this file when committing changes.
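A rule raises the log level of the matching loggers while you are debugging. A sketch of the same thing uncommented (addLogLevelRaisingRule and both rule forms are taken from the commented examples that follow; SelectiveLogger.py itself appears in the diffstat above):

    import re
    from util import SelectiveLogger

    SelectiveLogger.addLogLevelRaisingRule("ConnServer")           # rule given as a plain logger name
    SelectiveLogger.addLogLevelRaisingRule(re.compile(r'^Site:'))  # or as a compiled regex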
+ +#import re +#from util import SelectiveLogger +# +#SelectiveLogger.addLogLevelRaisingRule("ConnServer") +#SelectiveLogger.addLogLevelRaisingRule(re.compile(r'^Site:')) diff --git a/src/main.py b/src/main.py new file mode 100644 index 00000000..80ecab2b --- /dev/null +++ b/src/main.py @@ -0,0 +1,603 @@ +# Included modules +import os +import sys +import stat +import time +import logging +import loglevel_overrides + +startup_errors = [] +def startupError(msg): + startup_errors.append(msg) + print("Startup error: %s" % msg) + +# Third party modules +import gevent +if gevent.version_info.major <= 1: # Workaround for random crash when libuv used with threads + try: + if "libev" not in str(gevent.config.loop): + gevent.config.loop = "libev-cext" + except Exception as err: + startupError("Unable to switch gevent loop to libev: %s" % err) + +import gevent.monkey +gevent.monkey.patch_all(thread=False, subprocess=False) + +update_after_shutdown = False # If set True then update and restart zeronet after main loop ended +restart_after_shutdown = False # If set True then restart zeronet after main loop ended + +# Load config +from Config import config +config.parse(silent=True) # Plugins need to access the configuration +if not config.arguments: # Config parse failed, show the help screen and exit + config.parse() + +if not os.path.isdir(config.data_dir): + os.mkdir(config.data_dir) + try: + os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + except Exception as err: + startupError("Can't change permission of %s: %s" % (config.data_dir, err)) + +if not os.path.isfile("%s/sites.json" % config.data_dir): + open("%s/sites.json" % config.data_dir, "w").write("{}") +if not os.path.isfile("%s/users.json" % config.data_dir): + open("%s/users.json" % config.data_dir, "w").write("{}") + +if config.action == "main": + from util import helper + try: + lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w") + lock.write("%s" % os.getpid()) + except BlockingIOError as err: + startupError("Can't open lock file, your ZeroNet client is probably already running, exiting... 
(%s)" % err) + if config.open_browser and config.open_browser != "False": + print("Opening browser: %s...", config.open_browser) + import webbrowser + try: + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + browser.open("http://%s:%s/%s" % ( + config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage + ), new=2) + except Exception as err: + startupError("Error starting browser: %s" % err) + sys.exit() + +config.initLogging() + +# Debug dependent configuration +from Debug import DebugHook + +# Load plugins +from Plugin import PluginManager +PluginManager.plugin_manager.loadPlugins() +config.loadPlugins() +config.parse() # Parse again to add plugin configuration options + +# Log current config +logging.debug("Config: %s" % config) + +# Modify stack size on special hardwares +if config.stack_size: + import threading + threading.stack_size(config.stack_size) + +# Use pure-python implementation of msgpack to save CPU +if config.msgpack_purepython: + os.environ["MSGPACK_PUREPYTHON"] = "True" + +# Fix console encoding on Windows +if sys.platform.startswith("win"): + import subprocess + try: + chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip() + logging.debug("Changed console encoding to utf8: %s" % chcp_res) + except Exception as err: + logging.error("Error changing console encoding to utf8: %s" % err) + +# Socket monkey patch +if config.proxy: + from util import SocksProxy + import urllib.request + logging.info("Patching sockets to socks proxy: %s" % config.proxy) + if config.fileserver_ip == "*": + config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost + config.disable_udp = True # UDP not supported currently with proxy + SocksProxy.monkeyPatch(*config.proxy.split(":")) +elif config.tor == "always": + from util import SocksProxy + import urllib.request + logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy) + if config.fileserver_ip == "*": + config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost + SocksProxy.monkeyPatch(*config.tor_proxy.split(":")) + config.disable_udp = True +elif config.bind: + bind = config.bind + if ":" not in config.bind: + bind += ":0" + from util import helper + helper.socketBindMonkeyPatch(*bind.split(":")) + +# -- Actions -- + + +@PluginManager.acceptPlugins +class Actions(object): + def call(self, function_name, kwargs): + logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__)) + + func = getattr(self, function_name, None) + back = func(**kwargs) + if back: + print(back) + + # Default action: Start serving UiServer and FileServer + def main(self): + global ui_server, file_server + from File import FileServer + from Ui import UiServer + logging.info("Creating FileServer....") + file_server = FileServer() + logging.info("Creating UiServer....") + ui_server = UiServer() + file_server.ui_server = ui_server + + for startup_error in startup_errors: + logging.error("Startup error: %s" % startup_error) + + logging.info("Removing old SSL certs...") + from Crypt import CryptConnection + CryptConnection.manager.removeCerts() + + logging.info("Starting servers....") + gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)]) + logging.info("All servers stopped") + + # Site commands + + def siteCreate(self, use_master_seed=True): + logging.info("Generating new 
privatekey (use_master_seed: %s)..." % config.use_master_seed) + from Crypt import CryptBitcoin + if use_master_seed: + from User import UserManager + user = UserManager.user_manager.get() + if not user: + user = UserManager.user_manager.create() + address, address_index, site_data = user.getNewSiteData() + privatekey = site_data["privatekey"] + logging.info("Generated using master seed from users.json, site index: %s" % address_index) + else: + privatekey = CryptBitcoin.newPrivatekey() + address = CryptBitcoin.privatekeyToAddress(privatekey) + logging.info("----------------------------------------------------------------------") + logging.info("Site private key: %s" % privatekey) + logging.info(" !!! ^ Save it now, required to modify the site ^ !!!") + logging.info("Site address: %s" % address) + logging.info("----------------------------------------------------------------------") + + while True and not config.batch and not use_master_seed: + if input("? Have you secured your private key? (yes, no) > ").lower() == "yes": + break + else: + logging.info("Please, secure it now, you going to need it to modify your site!") + + logging.info("Creating directory structure...") + from Site.Site import Site + from Site import SiteManager + SiteManager.site_manager.load() + + os.mkdir("%s/%s" % (config.data_dir, address)) + open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address) + + logging.info("Creating content.json...") + site = Site(address) + extend = {"postmessage_nonce_security": True} + if use_master_seed: + extend["address_index"] = address_index + + site.content_manager.sign(privatekey=privatekey, extend=extend) + site.settings["own"] = True + site.saveSettings() + + logging.info("Site created!") + + def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False): + from Site.Site import Site + from Site import SiteManager + from Debug import Debug + SiteManager.site_manager.load() + logging.info("Signing site: %s..." % address) + site = Site(address, allow_create=False) + + if not privatekey: # If no privatekey defined + from User import UserManager + user = UserManager.user_manager.get() + if user: + site_data = user.getSiteData(address) + privatekey = site_data.get("privatekey") + else: + privatekey = None + if not privatekey: + # Not found in users.json, ask from console + import getpass + privatekey = getpass.getpass("Private key (input hidden):") + try: + succ = site.content_manager.sign( + inner_path=inner_path, privatekey=privatekey, + update_changed_files=True, remove_missing_optional=remove_missing_optional + ) + except Exception as err: + logging.error("Sign error: %s" % Debug.formatException(err)) + succ = False + if succ and publish: + self.sitePublish(address, inner_path=inner_path) + + def siteVerify(self, address): + import time + from Site.Site import Site + from Site import SiteManager + SiteManager.site_manager.load() + + s = time.time() + logging.info("Verifing site: %s..." % address) + site = Site(address) + bad_files = [] + + for content_inner_path in site.content_manager.contents: + s = time.time() + logging.info("Verifing %s signature..." 
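For orientation, a sketch of how these methods get invoked (the CLI-to-kwargs wiring lives in Config.py's argument parser and the start() function at the end of this file; the address is a placeholder):

    actions = Actions()
    # roughly what "zeronet.py siteSign 1ExampleAddr --publish" dispatches to:
    actions.call("siteSign", {"address": "1ExampleAddr", "publish": True})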
+    def siteVerify(self, address):
+        import time
+        from Site.Site import Site
+        from Site import SiteManager
+        SiteManager.site_manager.load()
+
+        s = time.time()
+        logging.info("Verifying site: %s..." % address)
+        site = Site(address)
+        bad_files = []
+
+        for content_inner_path in site.content_manager.contents:
+            s = time.time()
+            logging.info("Verifying %s signature..." % content_inner_path)
+            err = None
+            try:
+                file_correct = site.content_manager.verifyFile(
+                    content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False
+                )
+            except Exception as verify_err:
+                err = verify_err  # Keep a reference: the except target is unbound after the block
+                file_correct = False
+
+            if file_correct is True:
+                logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s))
+            else:
+                logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err))
+                input("Continue?")
+                bad_files.append(content_inner_path)
+
+        logging.info("Verifying site files...")
+        bad_files += site.storage.verifyFiles()["bad_files"]
+        if not bad_files:
+            logging.info("[OK] All file sha512sums match! (%.3fs)" % (time.time() - s))
+        else:
+            logging.error("[ERROR] Errors found while verifying site files!")
+
+    def dbRebuild(self, address):
+        from Site.Site import Site
+        from Site import SiteManager
+        SiteManager.site_manager.load()
+
+        logging.info("Rebuilding site sql cache: %s..." % address)
+        site = SiteManager.site_manager.get(address)
+        s = time.time()
+        try:
+            site.storage.rebuildDb()
+            logging.info("Done in %.3fs" % (time.time() - s))
+        except Exception as err:
+            logging.error(err)
+
+    def dbQuery(self, address, query):
+        from Site.Site import Site
+        from Site import SiteManager
+        SiteManager.site_manager.load()
+
+        import json
+        site = Site(address)
+        result = []
+        for row in site.storage.query(query):
+            result.append(dict(row))
+        print(json.dumps(result, indent=4))
+
+    def siteAnnounce(self, address):
+        from Site.Site import Site
+        from Site import SiteManager
+        SiteManager.site_manager.load()
+
+        logging.info("Opening a simple connection server")
+        global file_server
+        from File import FileServer
+        file_server = FileServer("127.0.0.1", 1234)
+        file_server.start()
+
+        logging.info("Announcing site %s to tracker..." % address)
+        site = Site(address)
+
+        s = time.time()
+        site.announce()
+        print("Response time: %.3fs" % (time.time() - s))
+        print(site.peers)
+
+    def siteDownload(self, address):
+        from Site.Site import Site
+        from Site import SiteManager
+        SiteManager.site_manager.load()
+
+        logging.info("Opening a simple connection server")
+        global file_server
+        from File import FileServer
+        file_server = FileServer("127.0.0.1", 1234)
+        file_server_thread = gevent.spawn(file_server.start, check_sites=False)
+
+        site = Site(address)
+
+        on_completed = gevent.event.AsyncResult()
+
+        def onComplete(evt):
+            evt.set(True)
+
+        site.onComplete.once(lambda: onComplete(on_completed))
+        print("Announcing...")
+        site.announce()
+
+        s = time.time()
+        print("Downloading...")
+        site.downloadContent("content.json", check_modifications=True)
+
+        print("Downloaded in %.3fs" % (time.time() - s))
+
+    def siteNeedFile(self, address, inner_path):
+        from Site.Site import Site
+        from Site import SiteManager
+        SiteManager.site_manager.load()
+
+        def checker():
+            while 1:
+                s = time.time()
+                time.sleep(1)
+                print("Switch time:", time.time() - s)
+        gevent.spawn(checker)
+
+        logging.info("Opening a simple connection server")
+        global file_server
+        from File import FileServer
+        file_server = FileServer("127.0.0.1", 1234)
+        file_server_thread = gevent.spawn(file_server.start, check_sites=False)
+
+        site = Site(address)
+        site.announce()
+        print(site.needFile(inner_path, update=True))
+
+    def siteCmd(self, address, cmd, parameters):
+        import json
+        from Site import SiteManager
+
+        site = SiteManager.site_manager.get(address)
+
+        if not site:
+            logging.error("Site not found: %s" % address)
+            return None
+
+        ws = self.getWebsocket(site)
+
+        ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1}))
+        res_raw = ws.recv()
+
+        try:
+            res = json.loads(res_raw)
+        except Exception as err:
+            return {"error": "Invalid result: %s" % err, "res_raw": res_raw}
+
+        if "result" in res:
+            return res["result"]
+        else:
+            return res
+
+    def getWebsocket(self, site):
+        import websocket
+
+        ws_address = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"])
+        logging.info("Connecting to %s" % ws_address)
+        ws = websocket.create_connection(ws_address)
+        return ws
+
+    def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"):
+        global file_server
+        from Site.Site import Site
+        from Site import SiteManager
+        from File import FileServer  # We need the fileserver to handle incoming file requests
+        from Peer import Peer
+        file_server = FileServer()
+        site = SiteManager.site_manager.get(address)
+        logging.info("Loading site...")
+        site.settings["serving"] = True  # Serve the site even if it's disabled
+
+        try:
+            ws = self.getWebsocket(site)
+            logging.info("Sending siteReload")
+            self.siteCmd(address, "siteReload", inner_path)
+
+            logging.info("Sending sitePublish")
+            self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False})
+            logging.info("Done.")
+
+        except Exception as err:
+            logging.info("Can't connect to local websocket client: %s" % err)
+            logging.info("Creating FileServer....")
+            file_server_thread = gevent.spawn(file_server.start, check_sites=False)  # Don't check every site's integrity
+            time.sleep(0.001)
+
+            # Started fileserver
+            file_server.portCheck()
+            if peer_ip:  # Announce to the specified ip
+                site.addPeer(peer_ip, peer_port)
+            else:  # Just ask the tracker
+                logging.info("Gathering peers from tracker")
+                site.announce()  # Gather peers
+            published =
site.publish(5, inner_path) # Push to peers + if published > 0: + time.sleep(3) + logging.info("Serving files (max 60s)...") + gevent.joinall([file_server_thread], timeout=60) + logging.info("Done.") + else: + logging.info("No peers found, sitePublish command only works if you already have visitors serving your site") + + # Crypto commands + def cryptPrivatekeyToAddress(self, privatekey=None): + from Crypt import CryptBitcoin + if not privatekey: # If no privatekey in args then ask it now + import getpass + privatekey = getpass.getpass("Private key (input hidden):") + + print(CryptBitcoin.privatekeyToAddress(privatekey)) + + def cryptSign(self, message, privatekey): + from Crypt import CryptBitcoin + print(CryptBitcoin.sign(message, privatekey)) + + def cryptVerify(self, message, sign, address): + from Crypt import CryptBitcoin + print(CryptBitcoin.verify(message, address, sign)) + + def cryptGetPrivatekey(self, master_seed, site_address_index=None): + from Crypt import CryptBitcoin + if len(master_seed) != 64: + logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed)) + return False + privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index) + print("Requested private key: %s" % privatekey) + + # Peer + def peerPing(self, peer_ip, peer_port=None): + if not peer_port: + peer_port = 15441 + logging.info("Opening a simple connection server") + global file_server + from Connection import ConnectionServer + file_server = ConnectionServer("127.0.0.1", 1234) + file_server.start(check_connections=False) + from Crypt import CryptConnection + CryptConnection.manager.loadCerts() + + from Peer import Peer + logging.info("Pinging 5 times peer: %s:%s..." % (peer_ip, int(peer_port))) + s = time.time() + peer = Peer(peer_ip, peer_port) + peer.connect() + + if not peer.connection: + print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error) + return False + if "shared_ciphers" in dir(peer.connection.sock): + print("Shared ciphers:", peer.connection.sock.shared_ciphers()) + if "cipher" in dir(peer.connection.sock): + print("Cipher:", peer.connection.sock.cipher()[0]) + if "version" in dir(peer.connection.sock): + print("TLS version:", peer.connection.sock.version()) + print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error)) + + for i in range(5): + ping_delay = peer.ping() + print("Response time: %.3fs" % ping_delay) + time.sleep(1) + peer.remove() + print("Reconnect test...") + peer = Peer(peer_ip, peer_port) + for i in range(5): + ping_delay = peer.ping() + print("Response time: %.3fs" % ping_delay) + time.sleep(1) + + def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False): + logging.info("Opening a simple connection server") + global file_server + from Connection import ConnectionServer + file_server = ConnectionServer("127.0.0.1", 1234) + file_server.start(check_connections=False) + from Crypt import CryptConnection + CryptConnection.manager.loadCerts() + + from Peer import Peer + logging.info("Getting %s/%s from peer: %s:%s..." 
% (site, filename, peer_ip, peer_port))
+        peer = Peer(peer_ip, peer_port)
+        s = time.time()
+        if benchmark:
+            for i in range(10):
+                peer.getFile(site, filename)
+            print("Response time: %.3fs" % (time.time() - s))
+            input("Check memory")
+        else:
+            print(peer.getFile(site, filename).read())
+
+    def peerCmd(self, peer_ip, peer_port, cmd, parameters):
+        logging.info("Opening a simple connection server")
+        global file_server
+        from Connection import ConnectionServer
+        file_server = ConnectionServer()
+        file_server.start(check_connections=False)
+        from Crypt import CryptConnection
+        CryptConnection.manager.loadCerts()
+
+        from Peer import Peer
+        peer = Peer(peer_ip, peer_port)
+
+        import json
+        if parameters:
+            parameters = json.loads(parameters.replace("'", '"'))
+        else:
+            parameters = {}
+        res = None  # Keep a value for the error message if the request itself fails
+        try:
+            res = peer.request(cmd, parameters)
+            print(json.dumps(res, indent=2, ensure_ascii=False))
+        except Exception as err:
+            print("Unknown response (%s): %s" % (err, res))
+
+    def getConfig(self):
+        import json
+        print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False))
+
+    def test(self, test_name, *args, **kwargs):
+        import types
+
+        def funcToName(func_name):
+            test_name = func_name.replace("test", "")
+            return test_name[0].lower() + test_name[1:]
+
+        test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"]
+        if not test_name:
+            # No test specified, list the tests
+            print("\nNo test specified, possible tests:")
+            for test_name in test_names:
+                func_name = "test" + test_name[0].upper() + test_name[1:]
+                func = getattr(self, func_name)
+                if func.__doc__:
+                    print("- %s: %s" % (test_name, func.__doc__.strip()))
+                else:
+                    print("- %s" % test_name)
+            return None
+
+        # Run tests
+        func_name = "test" + test_name[0].upper() + test_name[1:]
+        if hasattr(self, func_name):
+            func = getattr(self, func_name)
+            print("- Running test: %s" % test_name, end="")
+            s = time.time()
+            ret = func(*args, **kwargs)
+            if type(ret) is types.GeneratorType:
+                for progress in ret:
+                    print(progress, end="")
+                    sys.stdout.flush()
+            print("\n* Test %s done in %.3fs" % (test_name, time.time() - s))
+        else:
+            print("Unknown test: %r (choose from: %s)" % (
+                test_name, test_names
+            ))
+
+
+actions = Actions()
+# Starts here when running zeronet.py
+
+
+def start():
+    # Call function
+    action_kwargs = config.getActionArguments()
+    actions.call(config.action, action_kwargs)
diff --git a/src/util/Cached.py b/src/util/Cached.py
new file mode 100644
index 00000000..72d60dbc
--- /dev/null
+++ b/src/util/Cached.py
@@ -0,0 +1,68 @@
+import time
+
+
+class Cached(object):
+    def __init__(self, timeout):
+        self.cache_db = {}
+        self.timeout = timeout
+
+    def __call__(self, func):
+        def wrapper(*args, **kwargs):
+            key = "%s %s" % (args, kwargs)
+            cached_value = None
+            cache_hit = False
+            if key in self.cache_db:
+                cache_hit = True
+                cached_value, time_cached_end = self.cache_db[key]
+                if time.time() > time_cached_end:
+                    self.cleanupExpired()
+                    cached_value = None
+                    cache_hit = False
+
+            if cache_hit:
+                return cached_value
+            else:
+                cached_value = func(*args, **kwargs)
+                time_cached_end = time.time() + self.timeout
+                self.cache_db[key] = (cached_value, time_cached_end)
+                return cached_value
+
+        wrapper.emptyCache = self.emptyCache
+
+        return wrapper
+
+    def cleanupExpired(self):
+        for key in list(self.cache_db.keys()):
+            cached_value, time_cached_end = self.cache_db[key]
+            if time.time() > time_cached_end:
+                del(self.cache_db[key])
+
+    def emptyCache(self):
+        num = len(self.cache_db)
+        self.cache_db.clear()
+        return num
+
+
+if __name__ == "__main__":
+    from gevent import monkey
+    monkey.patch_all()
+
+    @Cached(timeout=2)
+    def calcAdd(a, b):
+        print("CalcAdd", a, b)
+        return a + b
+
+    @Cached(timeout=1)
+    def calcMultiply(a, b):
+        print("calcMultiply", a, b)
+        return a * b
+
+    for i in range(5):
+        print("---")
+        print("Emptied", calcAdd.emptyCache())
+        assert calcAdd(1, 2) == 3
+        print("Emptied", calcAdd.emptyCache())
+        assert calcAdd(1, 2) == 3
+        assert calcAdd(2, 3) == 5
+        assert calcMultiply(2, 3) == 6
+        time.sleep(1)
diff --git a/src/util/CircularIterator.py b/src/util/CircularIterator.py
new file mode 100644
index 00000000..3466092e
--- /dev/null
+++ b/src/util/CircularIterator.py
@@ -0,0 +1,34 @@
+import random
+
+
+class CircularIterator:
+    def __init__(self):
+        self.successive_count = 0
+        self.last_size = 0
+        self.index = -1
+
+    def next(self, items):
+        self.last_size = len(items)
+
+        if self.last_size == 0:
+            return None
+
+        if self.index < 0:
+            self.index = random.randint(0, self.last_size)
+        else:
+            self.index += 1
+
+        self.index = self.index % self.last_size
+
+        self.successive_count += 1
+
+        return items[self.index]
+
+    def resetSuccessiveCount(self):
+        self.successive_count = 0
+
+    def getSuccessiveCount(self):
+        return self.successive_count
+
+    def isWrapped(self):
+        return self.successive_count >= self.last_size
diff --git a/src/util/Diff.py b/src/util/Diff.py
new file mode 100644
index 00000000..8281188b
--- /dev/null
+++ b/src/util/Diff.py
@@ -0,0 +1,48 @@
+import io
+
+import difflib
+
+
+def sumLen(lines):
+    return sum(map(len, lines))
+
+
+def diff(old, new, limit=False):
+    matcher = difflib.SequenceMatcher(None, old, new)
+    actions = []
+    size = 0
+    for tag, old_from, old_to, new_from, new_to in matcher.get_opcodes():
+        if tag == "insert":
+            new_line = new[new_from:new_to]
+            actions.append(("+", new_line))
+            size += sum(map(len, new_line))
+        elif tag == "equal":
+            actions.append(("=", sumLen(old[old_from:old_to])))
+        elif tag == "delete":
+            actions.append(("-", sumLen(old[old_from:old_to])))
+        elif tag == "replace":
+            actions.append(("-", sumLen(old[old_from:old_to])))
+            new_lines = new[new_from:new_to]
+            actions.append(("+", new_lines))
+            size += sumLen(new_lines)
+        if limit and size > limit:
+            return False
+    return actions
+
+
+def patch(old_f, actions):
+    new_f = io.BytesIO()
+    for action, param in actions:
+        if type(action) is bytes:
+            action = action.decode()
+        if action == "=":  # Same lines
+            new_f.write(old_f.read(param))
+        elif action == "-":  # Delete lines
+            old_f.seek(param, 1)  # Seek from the current position
+            continue
+        elif action == "+":  # Add lines
+            for add_line in param:
+                new_f.write(add_line)
+        else:
+            raise ValueError("Unknown action: %s" % action)  # Raising a bare string is a TypeError on Python 3
+    return new_f
diff --git a/src/util/Electrum.py b/src/util/Electrum.py
new file mode 100644
index 00000000..112151aa
--- /dev/null
+++ b/src/util/Electrum.py
@@ -0,0 +1,39 @@
+import hashlib
+import struct
+
+
+# Electrum, the heck?!
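What this module implements (the names are this file's own): insane_int() is Bitcoin's CompactSize variable-length integer, magic() builds the canonical signed-message preimage b"\x18Bitcoin Signed Message:\n" + insane_int(len(message)) + message, and format()/dbl_format() are its single and double SHA-256 digests. For example:

    digest = dbl_format(b"hello")  # sha256(sha256(magic(b"hello"))): the digest Bitcoin message signatures commit to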
+ +def bchr(i): + return struct.pack("B", i) + +def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = b"".join([bchr(x) for x in range(256)]) + result = b"" + while val > 0: + index = val % base + result = code_string[index:index + 1] + result + val //= base + return code_string[0:1] * max(minlen - len(result), 0) + result + +def insane_int(x): + x = int(x) + if x < 253: + return bchr(x) + elif x < 65536: + return bchr(253) + encode(x, 256, 2)[::-1] + elif x < 4294967296: + return bchr(254) + encode(x, 256, 4)[::-1] + else: + return bchr(255) + encode(x, 256, 8)[::-1] + + +def magic(message): + return b"\x18Bitcoin Signed Message:\n" + insane_int(len(message)) + message + +def format(message): + return hashlib.sha256(magic(message)).digest() + +def dbl_format(message): + return hashlib.sha256(format(message)).digest() diff --git a/src/util/Event.py b/src/util/Event.py new file mode 100644 index 00000000..9d642736 --- /dev/null +++ b/src/util/Event.py @@ -0,0 +1,55 @@ +# Based on http://stackoverflow.com/a/2022629 + + +class Event(list): + + def __call__(self, *args, **kwargs): + for f in self[:]: + if "once" in dir(f) and f in self: + self.remove(f) + f(*args, **kwargs) + + def __repr__(self): + return "Event(%s)" % list.__repr__(self) + + def once(self, func, name=None): + func.once = True + func.name = None + if name: # Dont function with same name twice + names = [f.name for f in self if "once" in dir(f)] + if name not in names: + func.name = name + self.append(func) + else: + self.append(func) + return self + + +if __name__ == "__main__": + def testBenchmark(): + def say(pre, text): + print("%s Say: %s" % (pre, text)) + + import time + s = time.time() + on_changed = Event() + for i in range(1000): + on_changed.once(lambda pre: say(pre, "once"), "once") + print("Created 1000 once in %.3fs" % (time.time() - s)) + on_changed("#1") + + def testUsage(): + def say(pre, text): + print("%s Say: %s" % (pre, text)) + + on_changed = Event() + on_changed.once(lambda pre: say(pre, "once")) + on_changed.once(lambda pre: say(pre, "once")) + on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce") + on_changed.once(lambda pre: say(pre, "namedonce"), "namedonce") + on_changed.append(lambda pre: say(pre, "always")) + on_changed("#1") + on_changed("#2") + on_changed("#3") + + testBenchmark() diff --git a/src/util/Flag.py b/src/util/Flag.py new file mode 100644 index 00000000..37cfdfba --- /dev/null +++ b/src/util/Flag.py @@ -0,0 +1,22 @@ +from collections import defaultdict + + +class Flag(object): + def __init__(self): + self.valid_flags = set([ + "admin", # Only allowed to run sites with ADMIN permission + "async_run", # Action will be ran async with gevent.spawn + "no_multiuser" # Action disabled if Multiuser plugin running in open proxy mode + ]) + self.db = defaultdict(set) + + def __getattr__(self, key): + def func(f): + if key not in self.valid_flags: + raise Exception("Invalid flag: %s (valid: %s)" % (key, self.valid_flags)) + self.db[f.__name__].add(key) + return f + return func + + +flag = Flag() diff --git a/src/util/GreenletManager.py b/src/util/GreenletManager.py new file mode 100644 index 00000000..d711d09a --- /dev/null +++ b/src/util/GreenletManager.py @@ -0,0 +1,44 @@ +import gevent +from Debug import Debug + + +class GreenletManager: + # pool is either gevent.pool.Pool or GreenletManager. + # if pool is None, new gevent.pool.Pool() is created. 
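Typical use (a sketch; some_func is a placeholder): every greenlet spawned through the manager is tracked in self.greenlets until it finishes, so they can all be killed at once:

    manager = GreenletManager()
    manager.spawn(some_func, "arg")          # run now, tracked until it finishes
    manager.spawnLater(5, some_func, "arg")  # start after 5 seconds, also tracked
    manager.stopGreenlets("Site deleted")    # kill everything still tracked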
+    def __init__(self, pool=None):
+        self.greenlets = set()
+        if not pool:
+            pool = gevent.pool.Pool(None)
+        self.pool = pool
+
+    def _spawn_later(self, seconds, *args, **kwargs):
+        # If the pool is another GreenletManager, delegate to it.
+        if hasattr(self.pool, 'spawnLater'):
+            return self.pool.spawnLater(seconds, *args, **kwargs)
+
+        # There's gevent.spawn_later(), but there isn't gevent.pool.Pool.spawn_later().
+        # Doing it manually.
+        greenlet = self.pool.greenlet_class(*args, **kwargs)
+        self.pool.add(greenlet)
+        greenlet.start_later(seconds)
+        return greenlet
+
+    def _spawn(self, *args, **kwargs):
+        return self.pool.spawn(*args, **kwargs)
+
+    def spawnLater(self, *args, **kwargs):
+        greenlet = self._spawn_later(*args, **kwargs)
+        greenlet.link(lambda greenlet: self.greenlets.remove(greenlet))
+        self.greenlets.add(greenlet)
+        return greenlet
+
+    def spawn(self, *args, **kwargs):
+        greenlet = self._spawn(*args, **kwargs)
+        greenlet.link(lambda greenlet: self.greenlets.remove(greenlet))
+        self.greenlets.add(greenlet)
+        return greenlet
+
+    def stopGreenlets(self, reason="Stopping all greenlets"):
+        num = len(self.greenlets)
+        gevent.killall(list(self.greenlets), Debug.createNotifyType(reason), block=False)
+        return num
diff --git a/src/util/Msgpack.py b/src/util/Msgpack.py
new file mode 100644
index 00000000..1033f92e
--- /dev/null
+++ b/src/util/Msgpack.py
@@ -0,0 +1,101 @@
+import os
+import struct
+import io
+
+import msgpack
+import msgpack.fallback
+
+
+def msgpackHeader(size):
+    if size <= 2 ** 8 - 1:
+        return b"\xc4" + struct.pack("B", size)
+    elif size <= 2 ** 16 - 1:
+        return b"\xc5" + struct.pack(">H", size)
+    elif size <= 2 ** 32 - 1:
+        return b"\xc6" + struct.pack(">I", size)
+    else:
+        raise Exception("huge binary string")
+
+
+def stream(data, writer):
+    packer = msgpack.Packer(use_bin_type=True)
+    writer(packer.pack_map_header(len(data)))
+    size = 0  # Stays 0 if there is no file object in the data
+    for key, val in data.items():
+        writer(packer.pack(key))
+        if isinstance(val, io.IOBase):  # File obj
+            max_size = os.fstat(val.fileno()).st_size - val.tell()
+            size = min(max_size, val.read_bytes)
+            bytes_left = size
+            writer(msgpackHeader(size))
+            buff = 1024 * 64
+            while 1:
+                writer(val.read(min(bytes_left, buff)))
+                bytes_left = bytes_left - buff
+                if bytes_left <= 0:
+                    break
+        else:  # Simple
+            writer(packer.pack(val))
+    return size
+
+
+class FilePart(object):
+    __slots__ = ("file", "read_bytes", "__class__")
+
+    def __init__(self, *args, **kwargs):
+        self.file = open(*args, **kwargs)
+
+    def __getattr__(self, attr):
+        return getattr(self.file, attr)
+
+    def __enter__(self, *args, **kwargs):
+        return self.file.__enter__(*args, **kwargs)
+
+    def __exit__(self, *args, **kwargs):
+        return self.file.__exit__(*args, **kwargs)
+
+
+# Don't try to decode the value of these fields as utf8
+bin_value_keys = ("hashfield_raw", "peers", "peers_ipv6", "peers_onion", "body", "sites", "bin")
+
+
+def objectDecoderHook(obj):
+    global bin_value_keys
+    back = {}
+    for key, val in obj:
+        if type(key) is bytes:
+            key = key.decode("utf8")
+        if key in bin_value_keys or type(val) is not bytes or len(key) >= 64:
+            back[key] = val
+        else:
+            back[key] = val.decode("utf8")
+    return back
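Two details worth spelling out (a sketch; the path and socket_writer are placeholders): FilePart leaves its "__class__" slot unset, so attribute lookup falls through __getattr__ to the wrapped file, which appears to be how the isinstance(val, io.IOBase) check in stream() accepts it; and read_bytes caps how much of the file is streamed:

    body = FilePart("data/example.bin", "rb")
    body.read_bytes = 512 * 1024  # stream at most 512 KB of the file
    stream({"cmd": "response", "body": body}, socket_writer)  # "body" is in bin_value_keys, so it stays bytes on decode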
diff --git a/src/util/Noparallel.py b/src/util/Noparallel.py
new file mode 100644
index 00000000..4a4a854d
--- /dev/null
+++ b/src/util/Noparallel.py
@@ -0,0 +1,202 @@
+import gevent
+import time
+from gevent.event import AsyncResult
+
+from . import ThreadPool
+
+
+class Noparallel:  # Only allow the function to run once at the same time
+
+    def __init__(self, blocking=True, ignore_args=False, ignore_class=False, queue=False):
+        self.threads = {}
+        self.blocking = blocking  # Blocking: Acts like a normal function, else the thread is returned
+        self.queue = queue  # Execute again when the blocking call is done
+        self.queued = False
+        self.ignore_args = ignore_args  # Block does not depend on the function call arguments
+        self.ignore_class = ignore_class  # Block does not depend on the class instance
+
+    def __call__(self, func):
+        def wrapper(*args, **kwargs):
+            if not ThreadPool.isMainThread():
+                return ThreadPool.main_loop.call(wrapper, *args, **kwargs)
+
+            if self.ignore_class:
+                key = func  # Unique key only by function object
+            elif self.ignore_args:
+                key = (func, args[0])  # Unique key by function and class instance
+            else:
+                key = (func, tuple(args), str(kwargs))  # Unique key for the function including parameters
+            if key in self.threads:  # Thread already running (if using blocking mode)
+                if self.queue:
+                    self.queued = True
+                thread = self.threads[key]
+                if self.blocking:
+                    if self.queued:
+                        res = thread.get()  # Blocking until it's finished
+                        if key in self.threads:
+                            return self.threads[key].get()  # Queue finished since it started running
+                        self.queued = False
+                        return wrapper(*args, **kwargs)  # Run again after the end
+                    else:
+                        return thread.get()  # Return the value
+
+                else:  # No blocking
+                    if thread.ready():  # It's finished, create a new one
+                        thread = gevent.spawn(func, *args, **kwargs)
+                        self.threads[key] = thread
+                        return thread
+                    else:  # Still running
+                        return thread
+            else:  # Thread not running
+                if self.blocking:  # Wait for finish
+                    asyncres = AsyncResult()
+                    self.threads[key] = asyncres
+                    try:
+                        res = func(*args, **kwargs)
+                        asyncres.set(res)
+                        self.cleanup(key, asyncres)
+                        return res
+                    except Exception as err:
+                        asyncres.set_exception(err)
+                        self.cleanup(key, asyncres)
+                        raise err
+                else:  # No blocking, just return the thread
+                    thread = gevent.spawn(func, *args, **kwargs)  # Spawning new thread
+                    thread.link(lambda thread: self.cleanup(key, thread))
+                    self.threads[key] = thread
+                    return thread
+        wrapper.__name__ = func.__name__
+
+        return wrapper
+
+    # Cleanup finished threads
+    def cleanup(self, key, thread):
+        if key in self.threads:
+            del self.threads[key]
+
+
+if __name__ == "__main__":
+
+    class Test():
+
+        @Noparallel()
+        def count(self, num=5):
+            for i in range(num):
+                print(self, i)
+                time.sleep(1)
+            return "%s return:%s" % (self, i)
+
+    class TestNoblock():
+
+        @Noparallel(blocking=False)
+        def count(self, num=5):
+            for i in range(num):
+                print(self, i)
+                time.sleep(1)
+            return "%s return:%s" % (self, i)
+
+    def testBlocking():
+        test = Test()
+        test2 = Test()
+        print("Counting...")
+        print("Creating class1/thread1")
+        thread1 = gevent.spawn(test.count)
+        print("Creating class1/thread2 (ignored)")
+
thread2 = gevent.spawn(test.count) + print("Creating class2/thread3") + thread3 = gevent.spawn(test2.count) + + print("Joining class1/thread1") + thread1.join() + print("Joining class1/thread2") + thread2.join() + print("Joining class2/thread3") + thread3.join() + + print("Creating class1/thread4 (its finished, allowed again)") + thread4 = gevent.spawn(test.count) + print("Joining thread4") + thread4.join() + + print(thread1.value, thread2.value, thread3.value, thread4.value) + print("Done.") + + def testNoblocking(): + test = TestNoblock() + test2 = TestNoblock() + print("Creating class1/thread1") + thread1 = test.count() + print("Creating class1/thread2 (ignored)") + thread2 = test.count() + print("Creating class2/thread3") + thread3 = test2.count() + print("Joining class1/thread1") + thread1.join() + print("Joining class1/thread2") + thread2.join() + print("Joining class2/thread3") + thread3.join() + + print("Creating class1/thread4 (its finished, allowed again)") + thread4 = test.count() + print("Joining thread4") + thread4.join() + + print(thread1.value, thread2.value, thread3.value, thread4.value) + print("Done.") + + def testBenchmark(): + import time + + def printThreadNum(): + import gc + from greenlet import greenlet + objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] + print("Greenlets: %s" % len(objs)) + + printThreadNum() + test = TestNoblock() + s = time.time() + for i in range(3): + gevent.spawn(test.count, i + 1) + print("Created in %.3fs" % (time.time() - s)) + printThreadNum() + time.sleep(5) + + def testException(): + import time + @Noparallel(blocking=True, queue=True) + def count(self, num=5): + s = time.time() + # raise Exception("err") + for i in range(num): + print(self, i) + time.sleep(1) + return "%s return:%s" % (s, i) + def caller(): + try: + print("Ret:", count(5)) + except Exception as err: + print("Raised:", repr(err)) + + gevent.joinall([ + gevent.spawn(caller), + gevent.spawn(caller), + gevent.spawn(caller), + gevent.spawn(caller) + ]) + + + from gevent import monkey + monkey.patch_all() + + testException() + + """ + testBenchmark() + print("Testing blocking mode...") + testBlocking() + print("Testing noblocking mode...") + testNoblocking() + """ diff --git a/src/util/OpensslFindPatch.py b/src/util/OpensslFindPatch.py new file mode 100644 index 00000000..0f5d2dc6 --- /dev/null +++ b/src/util/OpensslFindPatch.py @@ -0,0 +1,69 @@ +import logging +import os +import sys +import ctypes.util + +from Config import config + +find_library_original = ctypes.util.find_library + + +def getOpensslPath(): + if config.openssl_lib_file: + return config.openssl_lib_file + + if sys.platform.startswith("win"): + lib_paths = [ + os.path.join(os.getcwd(), "tools/openssl/libeay32.dll"), # ZeroBundle Windows + os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1-x64.dll"), + os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1.dll") + ] + elif sys.platform == "cygwin": + lib_paths = ["/bin/cygcrypto-1.0.0.dll"] + else: + lib_paths = [ + "../runtime/lib/libcrypto.so.1.1", # ZeroBundle Linux + "../../Frameworks/libcrypto.1.1.dylib", # ZeroBundle macOS + "/opt/lib/libcrypto.so.1.0.0", # For optware and entware + "/usr/local/ssl/lib/libcrypto.so" + ] + + for lib_path in lib_paths: + if os.path.isfile(lib_path): + return lib_path + + if "ANDROID_APP_PATH" in os.environ: + try: + lib_dir = os.environ["ANDROID_APP_PATH"] + "/../../lib" + return [lib for lib in os.listdir(lib_dir) if "crypto" in lib][0] + except Exception as err: + 
logging.debug("OpenSSL lib not found in: %s (%s)" % (lib_dir, err)) + + if "LD_LIBRARY_PATH" in os.environ: + lib_dir_paths = os.environ["LD_LIBRARY_PATH"].split(":") + for path in lib_dir_paths: + try: + return [lib for lib in os.listdir(path) if "libcrypto.so" in lib][0] + except Exception as err: + logging.debug("OpenSSL lib not found in: %s (%s)" % (path, err)) + + lib_path = ( + find_library_original('ssl.so') or find_library_original('ssl') or + find_library_original('crypto') or find_library_original('libcrypto') or 'libeay32' + ) + + return lib_path + + +def patchCtypesOpensslFindLibrary(): + def findLibraryPatched(name): + if name in ("ssl", "crypto", "libeay32"): + lib_path = getOpensslPath() + return lib_path + else: + return find_library_original(name) + + ctypes.util.find_library = findLibraryPatched + + +patchCtypesOpensslFindLibrary() diff --git a/src/util/Platform.py b/src/util/Platform.py new file mode 100644 index 00000000..5bdde2f8 --- /dev/null +++ b/src/util/Platform.py @@ -0,0 +1,36 @@ +import sys +import logging + + +def setMaxfilesopened(limit): + try: + if sys.platform == "win32": + import ctypes + dll = None + last_err = None + for dll_name in ["msvcr100", "msvcr110", "msvcr120"]: + try: + dll = getattr(ctypes.cdll, dll_name) + break + except OSError as err: + last_err = err + + if not dll: + raise last_err + + maxstdio = dll._getmaxstdio() + if maxstdio < limit: + logging.debug("%s: Current maxstdio: %s, changing to %s..." % (dll, maxstdio, limit)) + dll._setmaxstdio(limit) + return True + else: + import resource + soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) + if soft < limit: + logging.debug("Current RLIMIT_NOFILE: %s (max: %s), changing to %s..." % (soft, hard, limit)) + resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) + return True + + except Exception as err: + logging.error("Failed to modify max files open limit: %s" % err) + return False diff --git a/src/util/Pooled.py b/src/util/Pooled.py new file mode 100644 index 00000000..9a4a7b63 --- /dev/null +++ b/src/util/Pooled.py @@ -0,0 +1,65 @@ +import gevent.pool + + +class Pooled(object): + def __init__(self, size=100): + self.pool = gevent.pool.Pool(size) + self.pooler_running = False + self.queue = [] + self.func = None + + def waiter(self, evt, args, kwargs): + res = self.func(*args, **kwargs) + if type(res) == gevent.event.AsyncResult: + evt.set(res.get()) + else: + evt.set(res) + + def pooler(self): + while self.queue: + evt, args, kwargs = self.queue.pop(0) + self.pool.spawn(self.waiter, evt, args, kwargs) + self.pooler_running = False + + def __call__(self, func): + def wrapper(*args, **kwargs): + evt = gevent.event.AsyncResult() + self.queue.append((evt, args, kwargs)) + if not self.pooler_running: + self.pooler_running = True + gevent.spawn(self.pooler) + return evt + wrapper.__name__ = func.__name__ + self.func = func + + return wrapper + +if __name__ == "__main__": + import gevent + import gevent.pool + import gevent.queue + import gevent.event + import gevent.monkey + import time + + gevent.monkey.patch_all() + + def addTask(inner_path): + evt = gevent.event.AsyncResult() + gevent.spawn_later(1, lambda: evt.set(True)) + return evt + + def needFile(inner_path): + return addTask(inner_path) + + @Pooled(10) + def pooledNeedFile(inner_path): + return needFile(inner_path) + + threads = [] + for i in range(100): + threads.append(pooledNeedFile(i)) + + s = time.time() + gevent.joinall(threads) # Should take 10 second + print(time.time() - s) diff --git a/src/util/QueryJson.py 
b/src/util/QueryJson.py
new file mode 100644
index 00000000..d9921ff0
--- /dev/null
+++ b/src/util/QueryJson.py
@@ -0,0 +1,67 @@
+import json
+import re
+import os
+
+
+def queryFile(file_path, filter_path, filter_key=None, filter_val=None):
+    back = []
+    with open(file_path) as f:
+        data = json.load(f)
+    if filter_path == ['']:
+        return [data]
+    for key in filter_path:  # Get to the point
+        data = data.get(key)
+        if not data:
+            return
+
+    if type(data) == list:
+        for row in data:
+            if filter_val:  # Filter by value
+                if row[filter_key] == filter_val:
+                    back.append(row)
+            else:
+                back.append(row)
+    else:
+        back.append({"value": data})
+
+    return back
+
+
+# Find in json files
+# Return: [{u'body': u'Hello Topic 1!!', 'inner_path': '1KRxE1...beEp6', u'added': 1422740732, u'message_id': 1},...]
+def query(path_pattern, filter):
+    if "=" in filter:  # Filter by value
+        filter_path, filter_val = filter.split("=")
+        filter_path = filter_path.split(".")
+        filter_key = filter_path.pop()  # Last element is the key
+        filter_val = int(filter_val)
+    else:  # No filter
+        filter_path = filter
+        filter_path = filter_path.split(".")
+        filter_key = None
+        filter_val = None
+
+    if "/*/" in path_pattern:  # Wildcard search
+        root_dir, file_pattern = path_pattern.replace("\\", "/").split("/*/")
+    else:  # No wildcard
+        root_dir, file_pattern = re.match("(.*)/(.*?)$", path_pattern.replace("\\", "/")).groups()
+    for root, dirs, files in os.walk(root_dir, topdown=False):
+        root = root.replace("\\", "/")
+        inner_path = root.replace(root_dir, "").strip("/")
+        for file_name in files:
+            if file_pattern != file_name:
+                continue
+
+            try:
+                res = queryFile(root + "/" + file_name, filter_path, filter_key, filter_val)
+                if not res:
+                    continue
+            except Exception:  # Json load error
+                continue
+            for row in res:
+                row["inner_path"] = inner_path
+                yield row
+
+
+if __name__ == "__main__":
+    for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")):
+        print(row)
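To make the filter syntax above concrete, a hedged sketch; the directory layout and JSON contents below are made up for illustration:

    # data/demo/user1/data.json:
    #   {"topics": [{"topic_id": 1, "title": "Hello"},
    #               {"topic_id": 2, "title": "World"}]}
    from util.QueryJson import query

    # Wildcard over user directories, filtering on a field value:
    for row in query("data/demo/*/data.json", "topics.topic_id=1"):
        print(row["title"], row["inner_path"])  # -> Hello user1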
"Calling now" + return gevent.spawn(func, *args, **kwargs) + else: # Called recently, schedule it for later + time_left = allowed_again - max(0, time.time() - called_db[event]) + log.debug("Added to queue (%.2fs left): %s " % (time_left, event)) + if not queue_db.get(event): # Function call not queued yet + thread = gevent.spawn_later(time_left, lambda: callQueue(event)) # Call this function later + queue_db[event] = (func, args, kwargs, thread) + return thread + else: # Function call already queued, just update the parameters + thread = queue_db[event][3] + queue_db[event] = (func, args, kwargs, thread) + return thread + + +# Rate limit and delay function call if needed +# Return: Wait for execution/delay then return value +def call(event, allowed_again=10, func=None, *args, **kwargs): + if isAllowed(event): # Not called recently, call it now + called(event) + # print "Calling now", allowed_again + return func(*args, **kwargs) + + else: # Called recently, schedule it for later + time_left = max(0, allowed_again - (time.time() - called_db[event])) + # print "Time left: %s" % time_left, args, kwargs + log.debug("Calling sync (%.2fs left): %s" % (time_left, event)) + called(event, time_left) + time.sleep(time_left) + back = func(*args, **kwargs) + called(event) + return back + + +# Cleanup expired events every 3 minutes +def rateLimitCleanup(): + while 1: + expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes + for event in list(called_db.keys()): + if called_db[event] < expired: + del called_db[event] + time.sleep(60 * 3) # Every 3 minutes +gevent.spawn(rateLimitCleanup) + + +if __name__ == "__main__": + from gevent import monkey + monkey.patch_all() + import random + + def publish(inner_path): + print("Publishing %s..." % inner_path) + return 1 + + def cb(thread): + print("Value:", thread.value) + + print("Testing async spam requests rate limit to 1/sec...") + for i in range(3000): + thread = callAsync("publish content.json", 1, publish, "content.json %s" % i) + time.sleep(float(random.randint(1, 20)) / 100000) + print(thread.link(cb)) + print("Done") + + time.sleep(2) + + print("Testing sync spam requests rate limit to 1/sec...") + for i in range(5): + call("publish data.json", 1, publish, "data.json %s" % i) + time.sleep(float(random.randint(1, 100)) / 100) + print("Done") + + print("Testing cleanup") + thread = callAsync("publish content.json single", 1, publish, "content.json single") + print("Needs to cleanup:", called_db, queue_db) + print("Waiting 3min for cleanup process...") + time.sleep(60 * 3) + print("Cleaned up:", called_db, queue_db) diff --git a/src/util/SafeRe.py b/src/util/SafeRe.py new file mode 100644 index 00000000..8c394a84 --- /dev/null +++ b/src/util/SafeRe.py @@ -0,0 +1,98 @@ +import re +import logging + +log = logging.getLogger("SafeRe") + + + +class UnsafePatternError(Exception): + pass + +max_cache_size = 1000 +cached_patterns = {} +old_cached_patterns = {} + + +def isSafePattern(pattern): + if len(pattern) > 255: + raise UnsafePatternError("Pattern too long: %s characters in %s" % (len(pattern), pattern)) + + unsafe_pattern_match = re.search(r"[^\.][\*\{\+]", pattern) # Always should be "." 
before "*{+" characters to avoid ReDoS + if unsafe_pattern_match: + raise UnsafePatternError("Potentially unsafe part of the pattern: %s in %s" % (unsafe_pattern_match.group(0), pattern)) + + repetitions1 = re.findall(r"\.[\*\{\+]", pattern) + repetitions2 = re.findall(r"[^(][?]", pattern) + if len(repetitions1) + len(repetitions2) >= 10: + raise UnsafePatternError("More than 10 repetitions in %s" % pattern) + + return True + + +def compilePattern(pattern): + global cached_patterns + global old_cached_patterns + + cached_pattern = cached_patterns.get(pattern) + if cached_pattern: + return cached_pattern + + cached_pattern = old_cached_patterns.get(pattern) + if cached_pattern: + del old_cached_patterns[pattern] + cached_patterns[pattern] = cached_pattern + return cached_pattern + + if isSafePattern(pattern): + cached_pattern = re.compile(pattern) + cached_patterns[pattern] = cached_pattern + log.debug("Compiled new pattern: %s" % pattern) + log.debug("Cache size: %d + %d" % (len(cached_patterns), len(old_cached_patterns))) + + if len(cached_patterns) > max_cache_size: + old_cached_patterns = cached_patterns + cached_patterns = {} + log.debug("Size limit reached. Rotating cache.") + log.debug("Cache size: %d + %d" % (len(cached_patterns), len(old_cached_patterns))) + + return cached_pattern + + +def match(pattern, *args, **kwargs): + cached_pattern = compilePattern(pattern) + return cached_pattern.match(*args, **kwargs) + +################################################################################ + +# TESTS + +def testSafePattern(pattern): + try: + return isSafePattern(pattern) + except UnsafePatternError as err: + return False + + +# Some real examples to make sure it works as expected +assert testSafePattern('(data/mp4/.*|updater/.*)') +assert testSafePattern('((js|css)/(?!all.(js|css)))|.git') + + +# Unsafe cases: + +# ((?!json).)*$ not allowed, because of ) before the * character. Possible fix: .*(?!json)$ +assert not testSafePattern('((?!json).)*$') +assert testSafePattern('.*(?!json)$') + +# (.*.epub|.*.jpg|.*.jpeg|.*.png|data/.*.gif|.*.avi|.*.ogg|.*.webm|.*.mp4|.*.mp3|.*.mkv|.*.eot) not allowed, because it has 12 .* repetition patterns. 
Possible fix: .*(epub|jpg|jpeg|png|data/gif|avi|ogg|webm|mp4|mp3|mkv|eot) +assert not testSafePattern('(.*.epub|.*.jpg|.*.jpeg|.*.png|data/.*.gif|.*.avi|.*.ogg|.*.webm|.*.mp4|.*.mp3|.*.mkv|.*.eot)') +assert testSafePattern('.*(epub|jpg|jpeg|png|data/gif|avi|ogg|webm|mp4|mp3|mkv|eot)') + +# https://github.com/HelloZeroNet/ZeroNet/issues/2757 +assert not testSafePattern('a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?a?aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa') +assert not testSafePattern('a?a?a?a?a?a?a?x.{0,1}x.{0,1}x.{0,1}') +assert testSafePattern('a?a?a?a?a?a?a?x.{0,1}x.{0,1}') +assert not testSafePattern('a?a?a?a?a?a?a?x.*x.*x.*') +assert testSafePattern('a?a?a?a?a?a?a?x.*x.*') + +################################################################################ diff --git a/src/util/SelectiveLogger.py b/src/util/SelectiveLogger.py new file mode 100644 index 00000000..fcdcba0a --- /dev/null +++ b/src/util/SelectiveLogger.py @@ -0,0 +1,43 @@ +import logging +import re + +log_level_raising_rules = [] + +def addLogLevelRaisingRule(rule, level=None): + if level is None: + level = logging.INFO + log_level_raising_rules.append({ + "rule": rule, + "level": level + }) + +def matchLogLevelRaisingRule(name): + for rule in log_level_raising_rules: + if isinstance(rule["rule"], re.Pattern): + if rule["rule"].search(name): + return rule["level"] + else: + if rule["rule"] == name: + return rule["level"] + return None + +class SelectiveLogger(logging.getLoggerClass()): + def __init__(self, name, level=logging.NOTSET): + return super().__init__(name, level) + + def raiseLevel(self, level): + raised_level = matchLogLevelRaisingRule(self.name) + if raised_level is not None: + if level < raised_level: + level = raised_level + return level + + def isEnabledFor(self, level): + level = self.raiseLevel(level) + return super().isEnabledFor(level) + + def _log(self, level, msg, args, **kwargs): + level = self.raiseLevel(level) + return super()._log(level, msg, args, **kwargs) + +logging.setLoggerClass(SelectiveLogger) diff --git a/src/util/SocksProxy.py b/src/util/SocksProxy.py new file mode 100644 index 00000000..f831137b --- /dev/null +++ b/src/util/SocksProxy.py @@ -0,0 +1,26 @@ +import socket + +import socks +from Config import config + +def create_connection(address, timeout=None, source_address=None): + if address in config.ip_local: + sock = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) + sock.connect(address) + else: + sock = socks.socksocket() + sock.connect(address) + return sock + + +# Dns queries using the proxy +def getaddrinfo(*args): + return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))] + + +def monkeyPatch(proxy_ip, proxy_port): + socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port)) + socket.socket_noproxy = socket.socket + socket.socket = socks.socksocket + socket.create_connection = create_connection + socket.getaddrinfo = getaddrinfo diff --git a/src/util/ThreadPool.py b/src/util/ThreadPool.py new file mode 100644 index 00000000..5b31ce37 --- /dev/null +++ b/src/util/ThreadPool.py @@ -0,0 +1,180 @@ +import threading +import time +import queue + +import gevent +import gevent.monkey +import gevent.threadpool +import gevent._threading + + +class ThreadPool: + def __init__(self, max_size, name=None): + self.setMaxSize(max_size) + if name: + self.name = name + else: + self.name = "ThreadPool#%s" % id(self) + + def setMaxSize(self, max_size): + self.max_size = max_size + if max_size > 0: 
+ self.pool = gevent.threadpool.ThreadPool(max_size) + else: + self.pool = None + + def wrap(self, func): + if self.pool is None: + return func + + def wrapper(*args, **kwargs): + if not isMainThread(): # Call directly if not in main thread + return func(*args, **kwargs) + res = self.apply(func, args, kwargs) + return res + + return wrapper + + def spawn(self, *args, **kwargs): + if not isMainThread() and not self.pool._semaphore.ready(): + # Avoid semaphore error when spawning from other thread and the pool is full + return main_loop.call(self.spawn, *args, **kwargs) + res = self.pool.spawn(*args, **kwargs) + return res + + def apply(self, func, args=(), kwargs={}): + t = self.spawn(func, *args, **kwargs) + if self.pool._apply_immediately(): + return main_loop.call(t.get) + else: + return t.get() + + def kill(self): + if self.pool is not None and self.pool.size > 0 and main_loop: + main_loop.call(lambda: gevent.spawn(self.pool.kill).join(timeout=1)) + + del self.pool + self.pool = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.kill() + + +lock_pool = gevent.threadpool.ThreadPool(50) +main_thread_id = threading.current_thread().ident + + +def isMainThread(): + return threading.current_thread().ident == main_thread_id + + +class Lock: + def __init__(self): + self.lock = gevent._threading.Lock() + self.locked = self.lock.locked + self.release = self.lock.release + self.time_lock = 0 + + def acquire(self, *args, **kwargs): + self.time_lock = time.time() + if self.locked() and isMainThread(): + # Start in new thread to avoid blocking gevent loop + return lock_pool.apply(self.lock.acquire, args, kwargs) + else: + return self.lock.acquire(*args, **kwargs) + + def __del__(self): + while self.locked(): + self.release() + + +class Event: + def __init__(self): + self.get_lock = Lock() + self.res = None + self.get_lock.acquire(False) + self.done = False + + def set(self, res): + if self.done: + raise Exception("Event already has value") + self.res = res + self.get_lock.release() + self.done = True + + def get(self): + if not self.done: + self.get_lock.acquire(True) + if self.get_lock.locked(): + self.get_lock.release() + back = self.res + return back + + def __del__(self): + self.res = None + while self.get_lock.locked(): + self.get_lock.release() + + +# Execute function calls in main loop from other threads +class MainLoopCaller(): + def __init__(self): + self.queue_call = queue.Queue() + + self.pool = gevent.threadpool.ThreadPool(1) + self.num_direct = 0 + self.running = True + + def caller(self, func, args, kwargs, event_done): + try: + res = func(*args, **kwargs) + event_done.set((True, res)) + except Exception as err: + event_done.set((False, err)) + + def start(self): + gevent.spawn(self.run) + time.sleep(0.001) + + def run(self): + while self.running: + if self.queue_call.qsize() == 0: # Get queue in new thread to avoid gevent blocking + func, args, kwargs, event_done = self.pool.apply(self.queue_call.get) + else: + func, args, kwargs, event_done = self.queue_call.get() + gevent.spawn(self.caller, func, args, kwargs, event_done) + del func, args, kwargs, event_done + self.running = False + + def call(self, func, *args, **kwargs): + if threading.current_thread().ident == main_thread_id: + return func(*args, **kwargs) + else: + event_done = Event() + self.queue_call.put((func, args, kwargs, event_done)) + success, res = event_done.get() + del event_done + self.queue_call.task_done() + if success: + return res + else: + raise res + + +def patchSleep(): # Fix 
memory leak by using real sleep in threads
+    real_sleep = gevent.monkey.get_original("time", "sleep")
+
+    def patched_sleep(seconds):
+        if isMainThread():
+            gevent.sleep(seconds)
+        else:
+            real_sleep(seconds)
+    time.sleep = patched_sleep
+
+
+main_loop = MainLoopCaller()
+main_loop.start()
+patchSleep()
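A small usage sketch for the ThreadPool above: wrapping a blocking function makes calls from the gevent main thread execute on a worker OS thread while still returning the result synchronously. This assumes the project's src directory is on sys.path; the wrapped function is hypothetical:

    from util import ThreadPool

    pool = ThreadPool.ThreadPool(4, name="Demo")

    @pool.wrap
    def blockingWork(n):
        # Runs in a real OS thread, so it may block without stalling the gevent loop
        total = 0
        for i in range(n):
            total += i
        return total

    print(blockingWork(10))  # -> 45
    pool.kill()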
+ """ + try: + dom = parseString(profile_xml) + except ExpatError as e: + raise IGDError( + 'Unable to parse IGD reply: {0} \n\n\n {1}'.format(profile_xml, e)) + + service_types = dom.getElementsByTagName('serviceType') + for service in service_types: + if _get_first_child_data(service).find('WANIPConnection') > 0 or \ + _get_first_child_data(service).find('WANPPPConnection') > 0: + try: + control_url = _get_first_child_data( + service.parentNode.getElementsByTagName('controlURL')[0]) + upnp_schema = _get_first_child_data(service).split(':')[-2] + return control_url, upnp_schema + except IndexError: + # Pass the error because any error here should raise the + # that's specified outside the for loop. + pass + raise IGDError( + 'Could not find a control url or UPNP schema in IGD response.') + + +# add description +def _get_local_ips(): + def method1(): + try: + # get local ip using UDP and a broadcast address + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + # Not using because gevents getaddrinfo doesn't like that + # using port 1 as per hobbldygoop's comment about port 0 not working on osx: + # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 + s.connect(('239.255.255.250', 1)) + return [s.getsockname()[0]] + except: + pass + + def method2(): + # Get ip by using UDP and a normal address (google dns ip) + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(('8.8.8.8', 0)) + return [s.getsockname()[0]] + except: + pass + + def method3(): + # Get ip by '' hostname . Not supported on all platforms. + try: + return socket.gethostbyname_ex('')[2] + except: + pass + + threads = [ + gevent.spawn(method1), + gevent.spawn(method2), + gevent.spawn(method3) + ] + + gevent.joinall(threads, timeout=5) + + local_ips = [] + for thread in threads: + if thread.value: + local_ips += thread.value + + # Delete duplicates + local_ips = list(set(local_ips)) + + + # Probably we looking for an ip starting with 192 + local_ips = sorted(local_ips, key=lambda a: a.startswith("192"), reverse=True) + + return local_ips + + +def _create_open_message(local_ip, + port, + description="UPnPPunch", + protocol="TCP", + upnp_schema='WANIPConnection'): + """ + Build a SOAP AddPortMapping message. + """ + + soap_message = """ + + + + + {port} + {protocol} + {port} + {host_ip} + 1 + {description} + 0 + + +""".format(port=port, + protocol=protocol, + host_ip=local_ip, + description=description, + upnp_schema=upnp_schema) + return (REMOVE_WHITESPACE.sub('><', soap_message), 'AddPortMapping') + + +def _create_close_message(local_ip, + port, + description=None, + protocol='TCP', + upnp_schema='WANIPConnection'): + soap_message = """ + + + + + {port} + {protocol} + + +""".format(port=port, + protocol=protocol, + upnp_schema=upnp_schema) + return (REMOVE_WHITESPACE.sub('><', soap_message), 'DeletePortMapping') + + +def _parse_for_errors(soap_response): + logger.debug(soap_response.status) + if soap_response.status >= 400: + response_data = soap_response.read() + logger.debug(response_data) + try: + err_dom = parseString(response_data) + err_code = _get_first_child_data(err_dom.getElementsByTagName( + 'errorCode')[0]) + err_msg = _get_first_child_data( + err_dom.getElementsByTagName('errorDescription')[0] + ) + except Exception as err: + raise IGDError( + 'Unable to parse SOAP error: {0}. 
Got: "{1}"'.format( + err, response_data)) + raise IGDError( + 'SOAP request error: {0} - {1}'.format(err_code, err_msg) + ) + return soap_response + + +def _send_soap_request(location, upnp_schema, control_path, soap_fn, + soap_message): + """ + Send out SOAP request to UPnP device and return a response. + """ + headers = { + 'SOAPAction': ( + '"urn:schemas-upnp-org:service:{schema}:' + '1#{fn_name}"'.format(schema=upnp_schema, fn_name=soap_fn) + ), + 'Content-Type': 'text/xml' + } + logger.debug("Sending UPnP request to {0}:{1}...".format( + location.hostname, location.port)) + conn = http.client.HTTPConnection(location.hostname, location.port) + conn.request('POST', control_path, soap_message, headers) + + response = conn.getresponse() + conn.close() + + return _parse_for_errors(response) + + +def _collect_idg_data(ip_addr): + idg_data = {} + idg_response = perform_m_search(ip_addr) + idg_data['location'] = _retrieve_location_from_ssdp(idg_response) + idg_data['control_path'], idg_data['upnp_schema'] = _parse_igd_profile( + _retrieve_igd_profile(idg_data['location'])) + return idg_data + + +def _send_requests(messages, location, upnp_schema, control_path): + responses = [_send_soap_request(location, upnp_schema, control_path, + message_tup[1], message_tup[0]) + for message_tup in messages] + + if all(rsp.status == 200 for rsp in responses): + return + raise UpnpError('Sending requests using UPnP failed.') + + +def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")): + logger.debug("Trying using local ip: %s" % ip) + idg_data = _collect_idg_data(ip) + + soap_messages = [ + msg_fn(ip, port, desc, proto, idg_data['upnp_schema']) + for proto in protos + ] + + _send_requests(soap_messages, **idg_data) + + +def _communicate_with_igd(port=15441, + desc="UpnpPunch", + retries=3, + fn=_create_open_message, + protos=("TCP", "UDP")): + """ + Manage sending a message generated by 'fn'. + """ + + local_ips = _get_local_ips() + success = False + + def job(local_ip): + for retry in range(retries): + try: + _orchestrate_soap_request(local_ip, port, fn, desc, protos) + return True + except Exception as e: + logger.debug('Upnp request using "{0}" failed: {1}'.format(local_ip, e)) + gevent.sleep(1) + return False + + threads = [] + + for local_ip in local_ips: + job_thread = gevent.spawn(job, local_ip) + threads.append(job_thread) + gevent.sleep(0.1) + if any([thread.value for thread in threads]): + success = True + break + + # Wait another 10sec for competition or any positive result + for _ in range(10): + all_done = all([thread.value is not None for thread in threads]) + any_succeed = any([thread.value for thread in threads]) + if all_done or any_succeed: + break + gevent.sleep(1) + + if any([thread.value for thread in threads]): + success = True + + if not success: + raise UpnpError( + 'Failed to communicate with igd using port {0} on local machine after {1} tries.'.format( + port, retries)) + + return success + + +def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): + logger.debug("Trying to open port %d." % port) + return _communicate_with_igd(port=port, + desc=desc, + retries=retries, + fn=_create_open_message, + protos=protos) + + +def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): + logger.debug("Trying to close port %d." 
% port) + # retries=1 because multiple successes cause 500 response and failure + return _communicate_with_igd(port=port, + desc=desc, + retries=retries, + fn=_create_close_message, + protos=protos) + + +if __name__ == "__main__": + from gevent import monkey + monkey.patch_all() + logging.basicConfig(level=logging.DEBUG) + import time + + s = time.time() + print("Opening port...") + print("Success:", ask_to_open_port(15443, "ZeroNet", protos=["TCP"])) + print("Done in", time.time() - s) + + + print("Closing port...") + print("Success:", ask_to_close_port(15443, "ZeroNet", protos=["TCP"])) + print("Done in", time.time() - s) + diff --git a/src/util/__init__.py b/src/util/__init__.py new file mode 100644 index 00000000..f00c1459 --- /dev/null +++ b/src/util/__init__.py @@ -0,0 +1,5 @@ +from .Cached import Cached +from .CircularIterator import CircularIterator +from .Event import Event +from .Noparallel import Noparallel +from .Pooled import Pooled diff --git a/src/util/helper.py b/src/util/helper.py new file mode 100644 index 00000000..f44bcfce --- /dev/null +++ b/src/util/helper.py @@ -0,0 +1,357 @@ +import os +import stat +import socket +import struct +import re +import collections +import time +import logging +import base64 +import json + +import gevent + +from Config import config + + +def atomicWrite(dest, content, mode="wb"): + try: + with open(dest + "-tmpnew", mode) as f: + f.write(content) + f.flush() + os.fsync(f.fileno()) + if os.path.isfile(dest + "-tmpold"): # Previous incomplete write + os.rename(dest + "-tmpold", dest + "-tmpold-%s" % time.time()) + if os.path.isfile(dest): # Rename old file to -tmpold + os.rename(dest, dest + "-tmpold") + os.rename(dest + "-tmpnew", dest) + if os.path.isfile(dest + "-tmpold"): + os.unlink(dest + "-tmpold") # Remove old file + return True + except Exception as err: + from Debug import Debug + logging.error( + "File %s write failed: %s, (%s) reverting..." 
% + (dest, Debug.formatException(err), Debug.formatStack()) + ) + if os.path.isfile(dest + "-tmpold") and not os.path.isfile(dest): + os.rename(dest + "-tmpold", dest) + return False + + +def jsonDumps(data): + content = json.dumps(data, indent=1, sort_keys=True) + + # Make it a little more compact by removing unnecessary white space + def compact_dict(match): + if "\n" in match.group(0): + return match.group(0).replace(match.group(1), match.group(1).strip()) + else: + return match.group(0) + + content = re.sub(r"\{(\n[^,\[\{]{10,100000}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) + + def compact_list(match): + if "\n" in match.group(0): + stripped_lines = re.sub("\n[ ]*", "", match.group(1)) + return match.group(0).replace(match.group(1), stripped_lines) + else: + return match.group(0) + + content = re.sub(r"\[([^\[\{]{2,100000}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL) + + # Remove end of line whitespace + content = re.sub(r"(?m)[ ]+$", "", content) + return content + + +def openLocked(path, mode="wb"): + try: + if os.name == "posix": + import fcntl + f = open(path, mode) + fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) + elif os.name == "nt": + import msvcrt + f = open(path, mode) + msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, 1) + else: + f = open(path, mode) + except (IOError, PermissionError, BlockingIOError) as err: + raise BlockingIOError("Unable to lock file: %s" % err) + return f + + +def getFreeSpace(): + free_space = -1 + if "statvfs" in dir(os): # Unix + statvfs = os.statvfs(config.data_dir.encode("utf8")) + free_space = statvfs.f_frsize * statvfs.f_bavail + else: # Windows + try: + import ctypes + free_space_pointer = ctypes.c_ulonglong(0) + ctypes.windll.kernel32.GetDiskFreeSpaceExW( + ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer) + ) + free_space = free_space_pointer.value + except Exception as err: + logging.error("GetFreeSpace error: %s" % err) + return free_space + + +def sqlquote(value): + if type(value) is int: + return str(value) + else: + return "'%s'" % value.replace("'", "''") + + +def shellquote(*args): + if len(args) == 1: + return '"%s"' % args[0].replace('"', "") + else: + return tuple(['"%s"' % arg.replace('"', "") for arg in args]) + + +def packPeers(peers): + packed_peers = {"ipv4": [], "ipv6": [], "onion": []} + for peer in peers: + try: + ip_type = getIpType(peer.ip) + if ip_type in packed_peers: + packed_peers[ip_type].append(peer.packMyAddress()) + except Exception: + logging.debug("Error packing peer address: %s" % peer) + return packed_peers + + +# ip, port to packed 6byte or 18byte format +def packAddress(ip, port): + if ":" in ip: + return socket.inet_pton(socket.AF_INET6, ip) + struct.pack("H", port) + else: + return socket.inet_aton(ip) + struct.pack("H", port) + + +# From 6byte or 18byte format to ip, port +def unpackAddress(packed): + if len(packed) == 18: + return socket.inet_ntop(socket.AF_INET6, packed[0:16]), struct.unpack_from("H", packed, 16)[0] + else: + if len(packed) != 6: + raise Exception("Invalid length ip4 packed address: %s" % len(packed)) + return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] + + +# onion, port to packed 12byte format +def packOnionAddress(onion, port): + onion = onion.replace(".onion", "") + return base64.b32decode(onion.upper()) + struct.pack("H", port) + + +# From 12byte format to ip, port +def unpackOnionAddress(packed): + return base64.b32encode(packed[0:-2]).lower().decode() + ".onion", struct.unpack("H", packed[-2:])[0] + + 
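A round-trip sketch for the packed address helpers above. The addresses are examples, and this assumes the project's src directory is on sys.path so helper's Config import resolves:

    from util import helper

    packed = helper.packAddress("1.2.3.4", 15441)
    print(len(packed))                   # -> 6 (4 IP bytes + 2 port bytes)
    print(helper.unpackAddress(packed))  # -> ('1.2.3.4', 15441)

    # IPv6 peers pack to 18 bytes; unpackAddress() detects the format by length
    print(len(helper.packAddress("2001:db8::1", 15441)))  # -> 18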
+# Get dir from file
+# Return: data/site/content.json -> data/site/
+def getDirname(path):
+    if "/" in path:
+        return path[:path.rfind("/") + 1].lstrip("/")
+    else:
+        return ""
+
+
+# Get file name from path
+# Return: data/site/content.json -> content.json
+def getFilename(path):
+    return path[path.rfind("/") + 1:]
+
+
+def getFilesize(path):
+    try:
+        s = os.stat(path)
+    except Exception:
+        return None
+    if stat.S_ISREG(s.st_mode):  # Test if it's a regular file
+        return s.st_size
+    else:
+        return None
+
+
+# Convert hash to hashid for hashfield
+def toHashId(hash):
+    return int(hash[0:4], 16)
+
+
+# Merge dict values
+def mergeDicts(dicts):
+    back = collections.defaultdict(set)
+    for d in dicts:
+        for key, val in d.items():
+            back[key].update(val)
+    return dict(back)
+
+
+# Request https url using gevent SSL error workaround
+def httpRequest(url, as_file=False):
+    if url.startswith("http://"):
+        import urllib.request
+        response = urllib.request.urlopen(url)
+    else:  # Hack to avoid Python gevent ssl errors
+        import socket
+        import http.client
+        import ssl
+
+        host, request = re.match("https://(.*?)(/.*?)$", url).groups()
+
+        conn = http.client.HTTPSConnection(host)
+        sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
+        conn.sock = ssl.wrap_socket(sock, conn.key_file, conn.cert_file)
+        conn.request("GET", request)
+        response = conn.getresponse()
+        if response.status in [301, 302, 303, 307, 308]:
+            logging.info("Redirect to: %s" % response.getheader('Location'))
+            response = httpRequest(response.getheader('Location'))
+
+    if as_file:
+        import io
+        data = io.BytesIO()
+        while True:
+            buff = response.read(1024 * 16)
+            if not buff:
+                break
+            data.write(buff)
+        return data
+    else:
+        return response
+
+
+def timerCaller(secs, func, *args, **kwargs):
+    gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)
+    func(*args, **kwargs)
+
+
+def timer(secs, func, *args, **kwargs):
+    return gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)
+
+
+def create_connection(address, timeout=None, source_address=None):
+    if address in config.ip_local:
+        sock = socket.create_connection_original(address, timeout, source_address)
+    else:
+        sock = socket.create_connection_original(address, timeout, socket.bind_addr)
+    return sock
+
+
+def socketBindMonkeyPatch(bind_ip, bind_port):
+    import socket
+    logging.info("Monkey patching socket to bind to: %s:%s" % (bind_ip, bind_port))
+    socket.bind_addr = (bind_ip, int(bind_port))
+    socket.create_connection_original = socket.create_connection
+    socket.create_connection = create_connection
+
+
+def limitedGzipFile(*args, **kwargs):
+    import gzip
+
+    class LimitedGzipFile(gzip.GzipFile):
+        def read(self, size=-1):
+            # Cap reads at 25MB to protect against decompression bombs
+            return super(LimitedGzipFile, self).read(1024 * 1024 * 25)
+    return LimitedGzipFile(*args, **kwargs)
+
+
+def avg(items):
+    if len(items) > 0:
+        return sum(items) / len(items)
+    else:
+        return 0
+
+
+def isIp(ip):
+    if ":" in ip:  # IPv6
+        try:
+            socket.inet_pton(socket.AF_INET6, ip)
+            return True
+        except Exception:
+            return False
+
+    else:  # IPv4
+        try:
+            socket.inet_aton(ip)
+            return True
+        except Exception:
+            return False
+
+
+local_ip_pattern = re.compile(r"^127\.|192\.168\.|10\.|172\.1[6-9]\.|172\.2[0-9]\.|172\.3[0-1]\.|169\.254\.|::1$|fe80")
+def isPrivateIp(ip):
+    return local_ip_pattern.match(ip)
+
+
+# XXX: Deprecated. Use ConnectionServer.getIpType() instead.
+# To be removed in 0.9.0 +def getIpType(ip): + if ip.endswith(".onion"): + return "onion" + elif ":" in ip: + return "ipv6" + elif re.match(r"[0-9\.]+$", ip): + return "ipv4" + else: + return "unknown" + + +def createSocket(ip, sock_type=socket.SOCK_STREAM): + ip_type = getIpType(ip) + if ip_type == "ipv6": + return socket.socket(socket.AF_INET6, sock_type) + else: + return socket.socket(socket.AF_INET, sock_type) + + +def getInterfaceIps(ip_type="ipv4"): + res = [] + if ip_type == "ipv6": + test_ips = ["ff0e::c", "2606:4700:4700::1111"] + else: + test_ips = ['239.255.255.250', "8.8.8.8"] + + for test_ip in test_ips: + try: + s = createSocket(test_ip, sock_type=socket.SOCK_DGRAM) + s.connect((test_ip, 1)) + res.append(s.getsockname()[0]) + except Exception: + pass + + try: + res += [ip[4][0] for ip in socket.getaddrinfo(socket.gethostname(), 1)] + except Exception: + pass + + res = [re.sub("%.*", "", ip) for ip in res if getIpType(ip) == ip_type and isIp(ip)] + return list(set(res)) + + +def cmp(a, b): + return (a > b) - (a < b) + + +def encodeResponse(func): # Encode returned data from utf8 to bytes + def wrapper(*args, **kwargs): + back = func(*args, **kwargs) + if "__next__" in dir(back): + for part in back: + if type(part) == bytes: + yield part + else: + yield part.encode() + else: + if type(back) == bytes: + yield back + else: + yield back.encode() + + return wrapper
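Finally, a sketch of encodeResponse above, which normalises a handler so every yielded chunk is bytes; the handler name below is made up for illustration:

    from util.helper import encodeResponse

    @encodeResponse
    def actionDemo():
        yield "<html>"   # str chunks get utf8-encoded
        yield b"<body>"  # bytes chunks pass through unchanged

    print(list(actionDemo()))  # -> [b'<html>', b'<body>']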