diff --git a/src/Config.py b/src/Config.py
deleted file mode 100644
index 7095975b..00000000
--- a/src/Config.py
+++ /dev/null
@@ -1,673 +0,0 @@
-import argparse
-import sys
-import os
-import locale
-import re
-import configparser
-import logging
-import logging.handlers
-import stat
-import time
-
-
-class Config(object):
-
- def __init__(self, argv):
- self.version = "0.7.2"
- self.rev = 4555
- self.argv = argv
- self.action = None
- self.test_parser = None
- self.pending_changes = {}
- self.need_restart = False
- self.keys_api_change_allowed = set([
- "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers",
- "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline",
- "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
- ])
- self.keys_restart_need = set([
- "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
- ])
- self.start_dir = self.getStartDir()
-
- self.config_file = self.start_dir + "/zeronet.conf"
- self.data_dir = self.start_dir + "/data"
- self.log_dir = self.start_dir + "/log"
- self.openssl_lib_file = None
- self.openssl_bin_file = None
-
- self.trackers_file = False
- self.createParser()
- self.createArguments()
-
- def createParser(self):
- # Create parser
- self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
- self.parser.register('type', 'bool', self.strToBool)
- self.subparsers = self.parser.add_subparsers(title="Action to perform", dest="action")
-
- def __str__(self):
- return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
-
- # Convert string to bool
- def strToBool(self, v):
- return v.lower() in ("yes", "true", "t", "1")
-
- def getStartDir(self):
- this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd")  # rstrip("cd") maps compiled Config.pyc/.pyd back to Config.py
-
- if "--start_dir" in self.argv:
- start_dir = self.argv[self.argv.index("--start_dir") + 1]
- elif this_file.endswith("/Contents/Resources/core/src/Config.py"):
- # Running as ZeroNet.app
- if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
- # Running from a non-writeable directory, put data to Application Support
- start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet")
- else:
- # Running from a writeable directory, put data next to the .app
- start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file)
- elif this_file.endswith("/core/src/Config.py"):
- # Running as an exe, or source is in the Application Support directory; put var files outside the core dir
- start_dir = this_file.replace("/core/src/Config.py", "")
- elif this_file.endswith("usr/share/zeronet/src/Config.py"):
- # Running from non-writeable location, e.g., AppImage
- start_dir = os.path.expanduser("~/ZeroNet")
- else:
- start_dir = "."
-
- return start_dir
-
- # Create command line arguments
- def createArguments(self):
- trackers = [
- "zero://boot3rdez4rzn36x.onion:15441",
- "zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY
- "udp://tracker.coppersurfer.tk:6969", # DE
- "udp://104.238.198.186:8000", # US/LA
- "udp://retracker.akado-ural.ru:80", # RU
- "http://h4.trakx.nibba.trade:80/announce", # US/VA
- "http://open.acgnxtracker.com:80/announce", # DE
- "http://tracker.bt4g.com:2095/announce", # Cloudflare
- "zero://2602:ffc5::c5b2:5360:26312" # US/ATL
- ]
- # Platform specific
- if sys.platform.startswith("win"):
- coffeescript = "type %s | tools\\coffee\\coffee.cmd"
- else:
- coffeescript = None
-
- try:
- language, enc = locale.getdefaultlocale()
- language = language.lower().replace("_", "-")
- if language not in ["pt-br", "zh-tw"]:
- language = language.split("-")[0]
- except Exception:
- language = "en"
-
- use_openssl = True
-
- if repr(1483108852.565) != "1483108852.565": # Fix for weird Android issue
- fix_float_decimals = True
- else:
- fix_float_decimals = False
-
- config_file = self.start_dir + "/zeronet.conf"
- data_dir = self.start_dir + "/data"
- log_dir = self.start_dir + "/log"
-
- ip_local = ["127.0.0.1", "::1"]
-
- # Main
- action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
-
- # SiteCreate
- action = self.subparsers.add_parser("siteCreate", help='Create a new site')
- action.register('type', 'bool', self.strToBool)
- action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True)
-
- # SiteNeedFile
- action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
- action.add_argument('address', help='Site address')
- action.add_argument('inner_path', help='File inner path')
-
- # SiteDownload
- action = self.subparsers.add_parser("siteDownload", help='Download a new site')
- action.add_argument('address', help='Site address')
-
- # SiteSign
- action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
- action.add_argument('address', help='Site to sign')
- action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
- action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
- default="content.json", metavar="inner_path")
- action.add_argument('--remove_missing_optional', help='Remove optional files that are not present in the directory', action='store_true')
- action.add_argument('--publish', help='Publish site after the signing', action='store_true')
-
- # SitePublish
- action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
- action.add_argument('address', help='Site to publish')
- action.add_argument('peer_ip', help='Peer ip to publish (default: random peer ips from trackers)',
- default=None, nargs='?')
- action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
- default=15441, nargs='?')
- action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
- default="content.json", metavar="inner_path")
-
- # SiteVerify
- action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
- action.add_argument('address', help='Site to verify')
-
- # SiteCmd
- action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site')
- action.add_argument('address', help='Site address')
- action.add_argument('cmd', help='API command name')
- action.add_argument('parameters', help='Parameters of the command', nargs='?')
-
- # dbRebuild
- action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
- action.add_argument('address', help='Site to rebuild')
-
- # dbQuery
- action = self.subparsers.add_parser("dbQuery", help='Query site sql cache')
- action.add_argument('address', help='Site to query')
- action.add_argument('query', help='Sql query')
-
- # PeerPing
- action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer')
- action.add_argument('peer_ip', help='Peer ip')
- action.add_argument('peer_port', help='Peer port', nargs='?')
-
- # PeerGetFile
- action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
- action.add_argument('peer_ip', help='Peer ip')
- action.add_argument('peer_port', help='Peer port')
- action.add_argument('site', help='Site address')
- action.add_argument('filename', help='File name to request')
- action.add_argument('--benchmark', help='Request the file 10 times, then display the total time', action='store_true')
-
- # PeerCmd
- action = self.subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
- action.add_argument('peer_ip', help='Peer ip')
- action.add_argument('peer_port', help='Peer port')
- action.add_argument('cmd', help='Command to execute')
- action.add_argument('parameters', help='Parameters to command', nargs='?')
-
- # CryptSign
- action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
- action.add_argument('message', help='Message to sign')
- action.add_argument('privatekey', help='Private key')
-
- # Crypt Verify
- action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address')
- action.add_argument('message', help='Message to verify')
- action.add_argument('sign', help='Signature for message')
- action.add_argument('address', help='Signer\'s address')
-
- # Crypt GetPrivatekey
- action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed')
- action.add_argument('master_seed', help='Source master seed')
- action.add_argument('site_address_index', help='Site address index', type=int)
-
- action = self.subparsers.add_parser("getConfig", help='Return json-encoded info')
- action = self.subparsers.add_parser("testConnection", help='Testing')
- action = self.subparsers.add_parser("testAnnounce", help='Testing')
-
- self.test_parser = self.subparsers.add_parser("test", help='Run a test')
- self.test_parser.add_argument('test_name', help='Test name', nargs="?")
- # self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true')
-
- # Config parameters
- self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
- self.parser.add_argument('--debug', help='Debug mode', action='store_true')
- self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true')
- self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
- self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true')
-
- self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
-
- self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path")
- self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
- self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
-
- self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"])
-
- self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
- self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"])
- self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"])
- self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int)
-
- self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
- self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
- self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
- self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
- self.parser.add_argument('--ui_host', help='Allow access using these hosts', metavar='host', nargs='*')
- self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true')
-
- self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
- nargs='?', const="default_browser", metavar='browser_name')
- self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D',
- metavar='address')
- self.parser.add_argument('--updatesite', help='Source code update site', default='1uPDaT3uSyWAPdCv1WkMb5hBQjWSNNACf',
- metavar='address')
- self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')
-
- self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
- self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
- self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
- self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
- self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
-
- self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
- self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port')
- self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port')
- self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"])
- self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, metavar='ip', nargs='*')
- self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
- self.parser.add_argument('--offline', help='Disable network communication', action='store_true')
-
- self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
- self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
- self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
- self.parser.add_argument('--trackers', help='Bootstrapping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
- self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*')
- self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
- self.parser.add_argument('--use_libsecp256k1', help='Use libsecp256k1 library for speedup', type='bool', choices=[True, False], default=True)
- self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup', type='bool', choices=[True, False], default=True)
- self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path")
- self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path")
- self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
- self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
- self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true')
- self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
- type='bool', choices=[True, False], default=True)
- self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
- self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup',
- default=2048, type=int, metavar='limit')
- self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
- self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)',
- type='bool', choices=[True, False], default=False)
- self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
- type='bool', choices=[True, False], default=False)
- self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
- type='bool', choices=[True, False], default=False)
- self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
- type='bool', choices=[True, False], default=fix_float_decimals)
- self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
-
- self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int)
- self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int)
- self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int)
- self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int)
-
- self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual")
-
- self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
- metavar='executable_path')
-
- self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
- self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
- self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
- self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
- self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true')
- self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10)
- self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='port', type=int, default=15441)
-
- self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
- self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true')
-
- return self.parser
-
- def loadTrackersFile(self):
- if not self.trackers_file:
- return None
-
- self.trackers = self.arguments.trackers[:]
-
- for trackers_file in self.trackers_file:
- try:
- if trackers_file.startswith("/"): # Absolute
- trackers_file_path = trackers_file
- elif trackers_file.startswith("{data_dir}"): # Relative to data_dir
- trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir)
- else: # Relative to zeronet.py
- trackers_file_path = self.start_dir + "/" + trackers_file
-
- with open(trackers_file_path) as f:
- for line in f:
- tracker = line.strip()
- if "://" in tracker and tracker not in self.trackers:
- self.trackers.append(tracker)
- except Exception as err:
- print("Error loading trackers file: %s" % err)
-
- # Find arguments specified for current action
- def getActionArguments(self):
- back = {}
- arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # Skip the first action (--help)
- for argument in arguments:
- back[argument.dest] = getattr(self, argument.dest)
- return back
-
- # Try to find action from argv
- def getAction(self, argv):
- actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions
- found_action = False
- for action in actions: # See if any in argv
- if action in argv:
- found_action = action
- break
- return found_action
-
- # Move plugin parameters to end of argument list
- def moveUnknownToEnd(self, argv, default_action):
- valid_actions = sum([action.option_strings for action in self.parser._actions], [])
- valid_parameters = []
- plugin_parameters = []
- plugin = False
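- # Any --option not registered on the parser is treated as a plugin parameter and kept at the end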
- for arg in argv:
- if arg.startswith("--"):
- if arg not in valid_actions:
- plugin = True
- else:
- plugin = False
- elif arg == default_action:
- plugin = False
-
- if plugin:
- plugin_parameters.append(arg)
- else:
- valid_parameters.append(arg)
- return valid_parameters + plugin_parameters
-
- def getParser(self, argv):
- action = self.getAction(argv)
- if not action:
- return self.parser
- else:
- return self.subparsers.choices[action]
-
- # Parse arguments from config file and command line
- def parse(self, silent=False, parse_config=True):
- argv = self.argv[:] # Copy command line arguments
- current_parser = self.getParser(argv)
- if silent: # Don't display messages or quit on unknown parameter
- original_print_message = self.parser._print_message
- original_exit = self.parser.exit
-
- def silencer(parser, function_name):
- parser.exited = True
- return None
- current_parser.exited = False
- current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message")
- current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit")
-
- self.parseCommandline(argv, silent) # Parse argv
- self.setAttributes()
- if parse_config:
- argv = self.parseConfig(argv) # Add arguments from config file
-
- self.parseCommandline(argv, silent) # Parse argv
- self.setAttributes()
-
- if not silent:
- if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
- self.ip_local.append(self.fileserver_ip)
-
- if silent: # Restore original functions
- if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
- self.action = None
- current_parser._print_message = original_print_message
- current_parser.exit = original_exit
-
- self.loadTrackersFile()
-
- # Parse command line arguments
- def parseCommandline(self, argv, silent=False):
- # Find out if an action is specified on start
- action = self.getAction(argv)
- if not action:
- argv.append("--end")
- argv.append("main")
- action = "main"
- argv = self.moveUnknownToEnd(argv, action)
- if silent:
- res = self.parser.parse_known_args(argv[1:])
- if res:
- self.arguments = res[0]
- else:
- self.arguments = {}
- else:
- self.arguments = self.parser.parse_args(argv[1:])
-
- # Parse config file
- def parseConfig(self, argv):
- # Find config file path from parameters
- if "--config_file" in argv:
- self.config_file = argv[argv.index("--config_file") + 1]
- # Load config file
- if os.path.isfile(self.config_file):
- config = configparser.RawConfigParser(allow_no_value=True, strict=False)
- config.read(self.config_file)
- for section in config.sections():
- for key, val in config.items(section):
- if val == "True":
- val = None
- if section != "global": # If not global prefix key with section
- key = section + "_" + key
-
- if key == "open_browser": # Prefer config file value over cli argument
- while "--%s" % key in argv:
- pos = argv.index("--open_browser")
- del argv[pos:pos + 2]
-
- argv_extend = ["--%s" % key]
- if val:
- for line in val.strip().split("\n"): # Allow multi-line values
- argv_extend.append(line)
- if "\n" in val:
- argv_extend.append("--end")
-
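- # Insert right after argv[0] so values given on the real command line can override these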
- argv = argv[:1] + argv_extend + argv[1:]
- return argv
-
- # Return command line value of given argument
- def getCmdlineValue(self, key):
- if key not in self.argv:
- return None
- argv_index = self.argv.index(key)
- if argv_index == len(self.argv) - 1: # last arg, value not specified
- return None
-
- return self.argv[argv_index + 1]
-
- # Expose arguments as class attributes
- def setAttributes(self):
- # Set attributes from arguments
- if self.arguments:
- args = vars(self.arguments)
- for key, val in args.items():
- if type(val) is list:
- val = val[:]
- if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"):
- if val:
- val = val.replace("\\", "/")
- setattr(self, key, val)
-
- def loadPlugins(self):
- from Plugin import PluginManager
-
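- # Plugins can override ConfigPlugin.createArguments() (via PluginManager.acceptPlugins) to register their own options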
- @PluginManager.acceptPlugins
- class ConfigPlugin(object):
- def __init__(self, config):
- self.argv = config.argv
- self.parser = config.parser
- self.subparsers = config.subparsers
- self.test_parser = config.test_parser
- self.getCmdlineValue = config.getCmdlineValue
- self.createArguments()
-
- def createArguments(self):
- pass
-
- ConfigPlugin(self)
-
- def saveValue(self, key, value):
- if not os.path.isfile(self.config_file):
- content = ""
- else:
- with open(self.config_file) as f:
- content = f.read()
- lines = content.splitlines()
-
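- # Locate the [global] section header and any existing line for this key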
- global_line_i = None
- key_line_i = None
- i = 0
- for line in lines:
- if line.strip() == "[global]":
- global_line_i = i
- if line.startswith(key + " =") or line == key:
- key_line_i = i
- i += 1
-
- if key_line_i is not None and len(lines) > key_line_i + 1:
- while True: # Delete previous multiline values
- is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t")
- if not is_value_line:
- break
- del lines[key_line_i + 1]
-
- if value is None: # Delete line
- if key_line_i is not None:
- del lines[key_line_i]
-
- else: # Add / update
- if type(value) is list:
- value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value]
- else:
- value_lines = [str(value).replace("\n", "").replace("\r", "")]
- new_line = "%s = %s" % (key, "\n ".join(value_lines))
- if key_line_i is not None: # Already in the config, change the line
- lines[key_line_i] = new_line
- elif global_line_i is None: # No global section yet, append to end of file
- lines.append("[global]")
- lines.append(new_line)
- else: # Has global section, append the line after it
- lines.insert(global_line_i + 1, new_line)
-
- open(self.config_file, "w").write("\n".join(lines))
-
- def getServerInfo(self):
- from Plugin import PluginManager
- import main
-
- info = {
- "platform": sys.platform,
- "fileserver_ip": self.fileserver_ip,
- "fileserver_port": self.fileserver_port,
- "ui_ip": self.ui_ip,
- "ui_port": self.ui_port,
- "version": self.version,
- "rev": self.rev,
- "language": self.language,
- "debug": self.debug,
- "plugins": PluginManager.plugin_manager.plugin_names,
-
- "log_dir": os.path.abspath(self.log_dir),
- "data_dir": os.path.abspath(self.data_dir),
- "src_dir": os.path.dirname(os.path.abspath(__file__))
- }
-
- try:
- info["ip_external"] = main.file_server.port_opened
- info["tor_enabled"] = main.file_server.tor_manager.enabled
- info["tor_status"] = main.file_server.tor_manager.status
- except Exception:
- pass
-
- return info
-
- def initConsoleLogger(self):
- if self.action == "main":
- format = '[%(asctime)s] %(name)s %(message)s'
- else:
- format = '%(name)s %(message)s'
-
- if self.console_log_level == "default":
- if self.silent:
- level = logging.ERROR
- elif self.debug:
- level = logging.DEBUG
- else:
- level = logging.INFO
- else:
- level = logging.getLevelName(self.console_log_level)
-
- console_logger = logging.StreamHandler()
- console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S"))
- console_logger.setLevel(level)
- logging.getLogger('').addHandler(console_logger)
-
- def initFileLogger(self):
- if self.action == "main":
- log_file_path = "%s/debug.log" % self.log_dir
- else:
- log_file_path = "%s/cmd.log" % self.log_dir
-
- if self.log_rotate == "off":
- file_logger = logging.FileHandler(log_file_path, "w", "utf-8")
- else:
- when_names = {"weekly": "w", "daily": "d", "hourly": "h"}
- file_logger = logging.handlers.TimedRotatingFileHandler(
- log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count,
- encoding="utf8"
- )
-
- if os.path.isfile(log_file_path):
- file_logger.doRollover() # Always start with empty log file
- file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s'))
- file_logger.setLevel(logging.getLevelName(self.log_level))
- logging.getLogger('').setLevel(logging.getLevelName(self.log_level))
- logging.getLogger('').addHandler(file_logger)
-
- def initLogging(self, console_logging=None, file_logging=None):
- if console_logging is None:
- console_logging = self.console_log_level != "off"
-
- if file_logging is None:
- file_logging = self.log_level != "off"
-
- # Create necessary files and dirs
- if not os.path.isdir(self.log_dir):
- os.mkdir(self.log_dir)
- try:
- os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
- except Exception as err:
- print("Can't change permission of %s: %s" % (self.log_dir, err))
-
- # Make warning hidden from console
- logging.WARNING = 15 # Don't display warnings if not in debug mode
- logging.addLevelName(15, "WARNING")
-
- logging.getLogger('').name = "-" # Remove root prefix
-
- self.error_logger = ErrorLogHandler()
- self.error_logger.setLevel(logging.getLevelName("ERROR"))
- logging.getLogger('').addHandler(self.error_logger)
-
- if console_logging:
- self.initConsoleLogger()
- if file_logging:
- self.initFileLogger()
-
-
-class ErrorLogHandler(logging.StreamHandler):
- def __init__(self):
- self.lines = []
- return super().__init__()
-
- def emit(self, record):
- self.lines.append([time.time(), record.levelname, self.format(record)])
-
- def onNewRecord(self, record):
- pass
-
-
-config = Config(sys.argv)
diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py
deleted file mode 100644
index 22bcf29c..00000000
--- a/src/Connection/Connection.py
+++ /dev/null
@@ -1,635 +0,0 @@
-import socket
-import time
-
-import gevent
-try:
- from gevent.coros import RLock
-except ImportError: # Newer gevent versions moved RLock to gevent.lock
- from gevent.lock import RLock
-
-from Config import config
-from Debug import Debug
-from util import Msgpack
-from Crypt import CryptConnection
-from util import helper
-
-
-class Connection(object):
- __slots__ = (
- "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "unpacker_bytes", "req_id", "ip_type",
- "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection",
- "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock",
- "last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "waiting_requests", "waiting_streams"
- )
-
- def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False):
- self.sock = sock
- self.cert_pin = None
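- # The ip may embed a certificate pin after a "#" separator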
- if "#" in ip:
- ip, self.cert_pin = ip.split("#")
- self.target_onion = target_onion # Requested onion address
- self.id = server.last_connection_id
- server.last_connection_id += 1
- self.protocol = "?"
- self.type = "?"
- self.ip_type = "?"
- self.port = int(port)
- self.setIp(ip)
-
- if helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
- self.is_private_ip = True
- else:
- self.is_private_ip = False
- self.is_tracker_connection = is_tracker_connection
-
- self.server = server
- self.unpacker = None # Stream incoming socket messages here
- self.unpacker_bytes = 0 # How many bytes the unpacker received
- self.req_id = 0 # Last request id
- self.handshake = {} # Handshake info got from peer
- self.crypt = None # Connection encryption method
- self.sock_wrapped = False # Socket wrapped to encryption
-
- self.connected = False
- self.event_connected = gevent.event.AsyncResult() # Resolves when the handshake is received
- self.closed = False
-
- # Stats
- self.start_time = time.time()
- self.handshake_time = 0
- self.last_recv_time = 0
- self.last_message_time = 0
- self.last_send_time = 0
- self.last_sent_time = 0
- self.incomplete_buff_recv = 0
- self.bytes_recv = 0
- self.bytes_sent = 0
- self.last_ping_delay = None
- self.last_req_time = 0
- self.last_cmd_sent = None
- self.last_cmd_recv = None
- self.bad_actions = 0
- self.sites = 0
- self.cpu_time = 0.0
- self.send_lock = RLock()
-
- self.name = None
- self.updateName()
-
- self.waiting_requests = {} # Waiting sent requests
- self.waiting_streams = {} # Waiting response file streams
-
- def setIp(self, ip):
- self.ip = ip
- self.ip_type = helper.getIpType(ip)
- self.updateName()
-
- def createSocket(self):
- if helper.getIpType(self.ip) == "ipv6" and not hasattr(socket, "socket_noproxy"):
- # Create IPv6 connection as IPv4 when using proxy
- return socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
- else:
- return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-
- def updateName(self):
- self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
-
- def __str__(self):
- return self.name
-
- def __repr__(self):
- return "<%s>" % self.__str__()
-
- def log(self, text):
- self.server.log.debug("%s > %s" % (self.name, text))
-
- def getValidSites(self):
- return [key for key, val in self.server.tor_manager.site_onions.items() if val == self.target_onion]
-
- def badAction(self, weight=1):
- self.bad_actions += weight
- if self.bad_actions > 40:
- self.close("Too many bad actions")
- elif self.bad_actions > 20:
- time.sleep(5)
-
- def goodAction(self):
- self.bad_actions = 0
-
- # Open connection to peer and wait for handshake
- def connect(self):
- self.type = "out"
- if self.ip_type == "onion":
- if not self.server.tor_manager or not self.server.tor_manager.enabled:
- raise Exception("Can't connect to onion addresses, no Tor controller present")
- self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
- elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
- raise Exception("Can't connect to local IPs in Tor: always mode")
- elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection:
- if config.trackers_proxy == "tor":
- self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
- else:
- import socks
- self.sock = socks.socksocket()
- proxy_ip, proxy_port = config.trackers_proxy.split(":")
- self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
- else:
- self.sock = self.createSocket()
-
- if "TCP_NODELAY" in dir(socket):
- self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
- timeout_before = self.sock.gettimeout()
- self.sock.settimeout(30)
- if self.ip_type == "ipv6" and not hasattr(self.sock, "proxy"):
- sock_address = (self.ip, self.port, 1, 1)
- else:
- sock_address = (self.ip, self.port)
-
- self.sock.connect(sock_address)
-
- # Implicit SSL
- should_encrypt = not self.ip_type == "onion" and self.ip not in self.server.broken_ssl_ips and self.ip not in config.ip_local
- if self.cert_pin:
- self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", cert_pin=self.cert_pin)
- self.sock.do_handshake()
- self.crypt = "tls-rsa"
- self.sock_wrapped = True
- elif should_encrypt and "tls-rsa" in CryptConnection.manager.crypt_supported:
- try:
- self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
- self.sock.do_handshake()
- self.crypt = "tls-rsa"
- self.sock_wrapped = True
- except Exception as err:
- if not config.force_encryption:
- self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
- self.server.broken_ssl_ips[self.ip] = True
- self.sock.close()
- self.crypt = None
- self.sock = self.createSocket()
- self.sock.settimeout(30)
- self.sock.connect(sock_address)
-
- # Detect protocol
- self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()})
- event_connected = self.event_connected
- gevent.spawn(self.messageLoop)
- connect_res = event_connected.get() # Wait for handshake
- self.sock.settimeout(timeout_before)
- return connect_res
-
- # Handle incoming connection
- def handleIncomingConnection(self, sock):
- self.log("Incoming connection...")
-
- if "TCP_NODELAY" in dir(socket):
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
- self.type = "in"
- if self.ip not in config.ip_local: # Clearnet: Check implicit SSL
- try:
- first_byte = sock.recv(1, gevent.socket.MSG_PEEK)
- if first_byte == b"\x16":
- self.log("Crypt in connection using implicit SSL")
- self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
- self.sock_wrapped = True
- self.crypt = "tls-rsa"
- except Exception as err:
- self.log("Socket peek error: %s" % Debug.formatException(err))
- self.messageLoop()
-
- def getMsgpackUnpacker(self):
- if self.handshake and self.handshake.get("use_bin_type"):
- return Msgpack.getUnpacker(fallback=True, decode=False)
- else: # Backward compatibility for <0.7.0
- return Msgpack.getUnpacker(fallback=True, decode=True)
-
- # Message loop for connection
- def messageLoop(self):
- if not self.sock:
- self.log("Socket error: No socket found")
- return False
- self.protocol = "v2"
- self.updateName()
- self.connected = True
- buff_len = 0
- req_len = 0
- self.unpacker_bytes = 0
-
- try:
- while not self.closed:
- buff = self.sock.recv(64 * 1024)
- if not buff:
- break # Connection closed
- buff_len = len(buff)
-
- # Statistics
- self.last_recv_time = time.time()
- self.incomplete_buff_recv += 1
- self.bytes_recv += buff_len
- self.server.bytes_recv += buff_len
- req_len += buff_len
-
- if not self.unpacker:
- self.unpacker = self.getMsgpackUnpacker()
- self.unpacker_bytes = 0
-
- self.unpacker.feed(buff)
- self.unpacker_bytes += buff_len
-
- while True:
- try:
- message = next(self.unpacker)
- except StopIteration:
- break
- if type(message) is not dict:
- if config.debug_socket:
- self.log("Invalid message type: %s, content: %r, buffer: %r" % (type(message), message, buff[0:16]))
- raise Exception("Invalid message type: %s" % type(message))
-
- # Stats
- self.incomplete_buff_recv = 0
- stat_key = message.get("cmd", "unknown")
- if stat_key == "response" and "to" in message:
- cmd_sent = self.waiting_requests.get(message["to"], {"cmd": "unknown"})["cmd"]
- stat_key = "response: %s" % cmd_sent
- if stat_key == "update":
- stat_key = "update: %s" % message["params"]["site"]
- self.server.stat_recv[stat_key]["bytes"] += req_len
- self.server.stat_recv[stat_key]["num"] += 1
- if "stream_bytes" in message:
- self.server.stat_recv[stat_key]["bytes"] += message["stream_bytes"]
- req_len = 0
-
- # Handle message
- if "stream_bytes" in message:
- buff_left = self.handleStream(message, buff)
- self.unpacker = self.getMsgpackUnpacker()
- self.unpacker.feed(buff_left)
- self.unpacker_bytes = len(buff_left)
- if config.debug_socket:
- self.log("Start new unpacker with buff_left: %r" % buff_left)
- else:
- self.handleMessage(message)
-
- message = None
- except Exception as err:
- if not self.closed:
- self.log("Socket error: %s" % Debug.formatException(err))
- self.server.stat_recv["error: %s" % err]["bytes"] += req_len
- self.server.stat_recv["error: %s" % err]["num"] += 1
- self.close("MessageLoop ended (closed: %s)" % self.closed) # MessageLoop ended, close connection
-
- def getUnpackerUnprocessedBytesNum(self):
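- # Newer msgpack unpackers expose tell(); older versions require reading the private buffer offsets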
- if "tell" in dir(self.unpacker):
- bytes_num = self.unpacker_bytes - self.unpacker.tell()
- else:
- bytes_num = self.unpacker._fb_buf_n - self.unpacker._fb_buf_o
- return bytes_num
-
- # Stream socket directly to a file
- def handleStream(self, message, buff):
- stream_bytes_left = message["stream_bytes"]
- file = self.waiting_streams[message["to"]]
-
- unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
-
- if unprocessed_bytes_num: # Found stream bytes in unpacker
- unpacker_stream_bytes = min(unprocessed_bytes_num, stream_bytes_left)
- buff_stream_start = len(buff) - unprocessed_bytes_num
- file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes])
- stream_bytes_left -= unpacker_stream_bytes
- else:
- unpacker_stream_bytes = 0
-
- if config.debug_socket:
- self.log(
- "Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" %
- (message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unprocessed_bytes_num)
- )
-
- try:
- while 1:
- if stream_bytes_left <= 0:
- break
- stream_buff = self.sock.recv(min(64 * 1024, stream_bytes_left))
- if not stream_buff:
- break
- buff_len = len(stream_buff)
- stream_bytes_left -= buff_len
- file.write(stream_buff)
-
- # Statistics
- self.last_recv_time = time.time()
- self.incomplete_buff_recv += 1
- self.bytes_recv += buff_len
- self.server.bytes_recv += buff_len
- except Exception as err:
- self.log("Stream read error: %s" % Debug.formatException(err))
-
- if config.debug_socket:
- self.log("End stream %s, file pos: %s" % (message["to"], file.tell()))
-
- self.incomplete_buff_recv = 0
- self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event
- del self.waiting_streams[message["to"]]
- del self.waiting_requests[message["to"]]
-
- if unpacker_stream_bytes:
- return buff[buff_stream_start + unpacker_stream_bytes:]
- else:
- return b""
-
- # My handshake info
- def getHandshakeInfo(self):
- # No TLS for onion connections
- if self.ip_type == "onion":
- crypt_supported = []
- elif self.ip in self.server.broken_ssl_ips:
- crypt_supported = []
- else:
- crypt_supported = CryptConnection.manager.crypt_supported
- # No peer id for onion connections
- if self.ip_type == "onion" or self.ip in config.ip_local:
- peer_id = ""
- else:
- peer_id = self.server.peer_id
- # Setup peer lock from requested onion address
- if self.handshake and self.handshake.get("target_ip", "").endswith(".onion") and self.server.tor_manager.start_onions:
- self.target_onion = self.handshake.get("target_ip").replace(".onion", "") # My onion address
- if self.target_onion not in self.server.tor_manager.site_onions.values():
- self.server.log.warning("Unknown target onion address: %s" % self.target_onion)
-
- handshake = {
- "version": config.version,
- "protocol": "v2",
- "use_bin_type": True,
- "peer_id": peer_id,
- "fileserver_port": self.server.port,
- "port_opened": self.server.port_opened.get(self.ip_type, None),
- "target_ip": self.ip,
- "rev": config.rev,
- "crypt_supported": crypt_supported,
- "crypt": self.crypt,
- "time": int(time.time())
- }
- if self.target_onion:
- handshake["onion"] = self.target_onion
- elif self.ip_type == "onion":
- handshake["onion"] = self.server.tor_manager.getOnion("global")
-
- if self.is_tracker_connection:
- handshake["tracker_connection"] = True
-
- if config.debug_socket:
- self.log("My Handshake: %s" % handshake)
-
- return handshake
-
- def setHandshake(self, handshake):
- if config.debug_socket:
- self.log("Remote Handshake: %s" % handshake)
-
- if handshake.get("peer_id") == self.server.peer_id and not handshake.get("tracker_connection") and not self.is_tracker_connection:
- self.close("Same peer id, can't connect to myself")
- self.server.peer_blacklist.append((handshake["target_ip"], handshake["fileserver_port"]))
- return False
-
- self.handshake = handshake
- if handshake.get("port_opened", None) is False and "onion" not in handshake and not self.is_private_ip: # Not connectable
- self.port = 0
- else:
- self.port = int(handshake["fileserver_port"]) # Set peer fileserver port
-
- if handshake.get("use_bin_type") and self.unpacker:
- unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
- self.log("Changing unpacker to bin type (unprocessed bytes: %s)" % unprocessed_bytes_num)
- unprocessed_bytes = self.unpacker.read_bytes(unprocessed_bytes_num)
- self.unpacker = self.getMsgpackUnpacker() # Create new unpacker for different msgpack type
- self.unpacker_bytes = 0
- if unprocessed_bytes:
- self.unpacker.feed(unprocessed_bytes)
-
- # Check if we can encrypt the connection
- if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips:
- if type(handshake["crypt_supported"][0]) is bytes:
- handshake["crypt_supported"] = [item.decode() for item in handshake["crypt_supported"]] # Backward compatibility
-
- if self.ip_type == "onion" or self.ip in config.ip_local:
- crypt = None
- elif handshake.get("crypt"): # Recommended crypt by server
- crypt = handshake["crypt"]
- else: # Select the best supported on both sides
- crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])
-
- if crypt:
- self.crypt = crypt
-
- if self.type == "in" and handshake.get("onion") and not self.ip_type == "onion": # Set incoming connection's onion address
- if self.server.ips.get(self.ip) == self:
- del self.server.ips[self.ip]
- self.setIp(handshake["onion"] + ".onion")
- self.log("Changing ip to %s" % self.ip)
- self.server.ips[self.ip] = self
- self.updateName()
-
- self.event_connected.set(True) # Mark handshake as done
- self.event_connected = None
- self.handshake_time = time.time()
-
- # Handle incoming message
- def handleMessage(self, message):
- cmd = message["cmd"]
-
- self.last_message_time = time.time()
- self.last_cmd_recv = cmd
- if cmd == "response": # New style response
- if message["to"] in self.waiting_requests:
- if self.last_send_time and len(self.waiting_requests) == 1:
- ping = time.time() - self.last_send_time
- self.last_ping_delay = ping
- self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event
- del self.waiting_requests[message["to"]]
- elif message["to"] == 0: # Other peers handshake
- ping = time.time() - self.start_time
- if config.debug_socket:
- self.log("Handshake response: %s, ping: %s" % (message, ping))
- self.last_ping_delay = ping
- # Server switched to crypt, let's do it too if not encrypted already
- if message.get("crypt") and not self.sock_wrapped:
- self.crypt = message["crypt"]
- server = (self.type == "in")
- self.log("Crypt out connection using: %s (server side: %s, ping: %.3fs)..." % (self.crypt, server, ping))
- self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
- self.sock.do_handshake()
- self.sock_wrapped = True
-
- if not self.sock_wrapped and self.cert_pin:
- self.close("Crypt connection error: Socket not encrypted, but certificate pin present")
- return
-
- self.setHandshake(message)
- else:
- self.log("Unknown response: %s" % message)
- elif cmd:
- self.server.num_recv += 1
- if cmd == "handshake":
- self.handleHandshake(message)
- else:
- self.server.handleRequest(self, message)
-
- # Incoming handshake set request
- def handleHandshake(self, message):
- self.setHandshake(message["params"])
- data = self.getHandshakeInfo()
- data["cmd"] = "response"
- data["to"] = message["req_id"]
- self.send(data) # Send response to handshake
- # Sent crypt request to client
- if self.crypt and not self.sock_wrapped:
- server = (self.type == "in")
- self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
- try:
- self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
- self.sock_wrapped = True
- except Exception as err:
- if not config.force_encryption:
- self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
- self.server.broken_ssl_ips[self.ip] = True
- self.close("Broken ssl")
-
- if not self.sock_wrapped and self.cert_pin:
- self.close("Crypt connection error: Socket not encrypted, but certificate pin present")
-
- # Send data to connection
- def send(self, message, streaming=False):
- self.last_send_time = time.time()
- if config.debug_socket:
- self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
- message.get("cmd"), message.get("to"), streaming,
- message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
- message.get("req_id"))
- )
-
- if not self.sock:
- self.log("Send error: missing socket")
- return False
-
- if not self.connected and message.get("cmd") != "handshake":
- self.log("Wait for handshake before send request")
- self.event_connected.get()
-
- try:
- stat_key = message.get("cmd", "unknown")
- if stat_key == "response":
- stat_key = "response: %s" % self.last_cmd_recv
- else:
- self.server.num_sent += 1
-
- self.server.stat_sent[stat_key]["num"] += 1
- if streaming:
- with self.send_lock:
- bytes_sent = Msgpack.stream(message, self.sock.sendall)
- self.bytes_sent += bytes_sent
- self.server.bytes_sent += bytes_sent
- self.server.stat_sent[stat_key]["bytes"] += bytes_sent
- message = None
- else:
- data = Msgpack.pack(message)
- self.bytes_sent += len(data)
- self.server.bytes_sent += len(data)
- self.server.stat_sent[stat_key]["bytes"] += len(data)
- message = None
- with self.send_lock:
- self.sock.sendall(data)
- except Exception as err:
- self.close("Send error: %s (cmd: %s)" % (err, stat_key))
- return False
- self.last_sent_time = time.time()
- return True
-
- # Stream file to connection without msgpacking
- def sendRawfile(self, file, read_bytes):
- buff = 64 * 1024
- bytes_left = read_bytes
- bytes_sent = 0
- while True:
- self.last_send_time = time.time()
- data = file.read(min(bytes_left, buff))
- bytes_sent += len(data)
- with self.send_lock:
- self.sock.sendall(data)
- bytes_left -= buff
- if bytes_left <= 0:
- break
- self.bytes_sent += bytes_sent
- self.server.bytes_sent += bytes_sent
- self.server.stat_sent["raw_file"]["num"] += 1
- self.server.stat_sent["raw_file"]["bytes"] += bytes_sent
- return True
-
- # Create and send a request to peer
- def request(self, cmd, params={}, stream_to=None):
- # Last command sent more than 10 sec ago, timeout
- if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
- self.close("Request %s timeout: %.3fs" % (self.last_cmd_sent, time.time() - self.last_send_time))
- return False
-
- self.last_req_time = time.time()
- self.last_cmd_sent = cmd
- self.req_id += 1
- data = {"cmd": cmd, "req_id": self.req_id, "params": params}
- event = gevent.event.AsyncResult() # Create new event for response
- self.waiting_requests[self.req_id] = {"evt": event, "cmd": cmd}
- if stream_to:
- self.waiting_streams[self.req_id] = stream_to
- self.send(data) # Send request
- res = event.get() # Wait until event solves
- return res
-
- def ping(self):
- s = time.time()
- response = None
- with gevent.Timeout(10.0, False): # False: suppress the timeout exception, response simply stays None
- try:
- response = self.request("ping")
- except Exception as err:
- self.log("Ping error: %s" % Debug.formatException(err))
- if response and "body" in response and response["body"] == b"Pong!":
- self.last_ping_delay = time.time() - s
- return True
- else:
- return False
-
- # Close connection
- def close(self, reason="Unknown"):
- if self.closed:
- return False # Already closed
- self.closed = True
- self.connected = False
- if self.event_connected:
- self.event_connected.set(False)
-
- self.log(
- "Closing connection: %s, waiting_requests: %s, sites: %s, buff: %s..." %
- (reason, len(self.waiting_requests), self.sites, self.incomplete_buff_recv)
- )
- for request in self.waiting_requests.values(): # Mark pending requests failed
- request["evt"].set(False)
- self.waiting_requests = {}
- self.waiting_streams = {}
- self.sites = 0
- self.server.removeConnection(self) # Remove connection from server registry
- try:
- if self.sock:
- self.sock.shutdown(gevent.socket.SHUT_WR)
- self.sock.close()
- except Exception as err:
- if config.debug_socket:
- self.log("Close error: %s" % err)
-
- # Little cleanup
- self.sock = None
- self.unpacker = None
- self.event_connected = None
diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py
deleted file mode 100644
index 8d377aca..00000000
--- a/src/Connection/ConnectionServer.py
+++ /dev/null
@@ -1,373 +0,0 @@
-import logging
-import time
-import sys
-import socket
-from collections import defaultdict
-
-import gevent
-import msgpack
-from gevent.server import StreamServer
-from gevent.pool import Pool
-
-import util
-from util import helper
-from Debug import Debug
-from .Connection import Connection
-from Config import config
-from Crypt import CryptConnection
-from Crypt import CryptHash
-from Tor import TorManager
-from Site import SiteManager
-
-
-class ConnectionServer(object):
- def __init__(self, ip=None, port=None, request_handler=None):
- if not ip:
- if config.fileserver_ip_type == "ipv6":
- ip = "::1"
- else:
- ip = "127.0.0.1"
- port = 15441
- self.ip = ip
- self.port = port
- self.last_connection_id = 1 # Connection id incrementer
- self.log = logging.getLogger("ConnServer")
- self.port_opened = {}
- self.peer_blacklist = SiteManager.peer_blacklist
-
- self.tor_manager = TorManager(self.ip, self.port)
- self.connections = [] # Connections
- self.whitelist = config.ip_local # No flood protection on these ips
- self.ip_incoming = {} # Incoming connections from ip in the last minute to avoid connection flood
- self.broken_ssl_ips = {} # Ips of connections with broken ssl
- self.ips = {} # Connection by ip
- self.has_internet = True # Internet outage detection
-
- self.stream_server = None
- self.stream_server_proxy = None
- self.running = False
- self.stopping = False
- self.thread_checker = None
-
- self.stat_recv = defaultdict(lambda: defaultdict(int)) # Per-command "bytes"/"num" counters for received messages
- self.stat_sent = defaultdict(lambda: defaultdict(int)) # Per-command "bytes"/"num" counters for sent messages
- self.bytes_recv = 0
- self.bytes_sent = 0
- self.num_recv = 0
- self.num_sent = 0
-
- self.num_incoming = 0
- self.num_outgoing = 0
- self.had_external_incoming = False
-
- self.timecorrection = 0.0
- self.pool = Pool(500) # do not accept more than 500 connections
-
- # Bittorrent style peerid
- self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64")
-
- # Check msgpack version
- if msgpack.version[0] == 0 and msgpack.version[1] < 4:
- self.log.error(
- "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" %
- str(msgpack.version)
- )
- sys.exit(1)
-
- if request_handler:
- self.handleRequest = request_handler
-
- def start(self, check_connections=True):
- if self.stopping:
- return False
- self.running = True
- if check_connections:
- self.thread_checker = gevent.spawn(self.checkConnections)
- CryptConnection.manager.loadCerts()
- if config.tor != "disable":
- self.tor_manager.start()
- if not self.port:
- self.log.info("No port found, not binding")
- return False
-
- self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % (
- self.ip, self.port, ".".join(map(str, msgpack.version)),
- CryptConnection.manager.crypt_supported
- ))
- try:
- self.stream_server = StreamServer(
- (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
- )
- except Exception as err:
- self.log.info("StreamServer create error: %s" % Debug.formatException(err))
-
- def listen(self):
- if not self.running:
- return None
-
- if self.stream_server_proxy:
- gevent.spawn(self.listenProxy)
- try:
- self.stream_server.serve_forever()
- except Exception as err:
- self.log.info("StreamServer listen error: %s" % err)
- return False
- self.log.debug("Stopped.")
-
- def stop(self):
- self.log.debug("Stopping %s" % self.stream_server)
- self.stopping = True
- self.running = False
- if self.thread_checker:
- gevent.kill(self.thread_checker)
- if self.stream_server:
- self.stream_server.stop()
-
- def closeConnections(self):
- self.log.debug("Closing all connection: %s" % len(self.connections))
- for connection in self.connections[:]:
- connection.close("Close all connections")
-
- def handleIncomingConnection(self, sock, addr):
- if config.offline:
- sock.close()
- return False
-
- ip, port = addr[0:2]
- ip = ip.lower()
- if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping
- ip = ip.replace("::ffff:", "", 1)
- self.num_incoming += 1
-
- if not self.had_external_incoming and not helper.isPrivateIp(ip):
- self.had_external_incoming = True
-
- # Connection flood protection
- if ip in self.ip_incoming and ip not in self.whitelist:
- self.ip_incoming[ip] += 1
- if self.ip_incoming[ip] > 6: # Allow 6 in 1 minute from same ip
- self.log.debug("Connection flood detected from %s" % ip)
- time.sleep(30)
- sock.close()
- return False
- else:
- self.ip_incoming[ip] = 1
-
- connection = Connection(self, ip, port, sock)
- self.connections.append(connection)
- if ip not in config.ip_local:
- self.ips[ip] = connection
- connection.handleIncomingConnection(sock)
-
- def handleMessage(self, *args, **kwargs):
- pass
-
- def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None, is_tracker_connection=False):
- ip_type = helper.getIpType(ip)
- has_per_site_onion = (ip.endswith(".onion") or self.port_opened.get(ip_type, None) == False) and self.tor_manager.start_onions and site
- if has_per_site_onion: # Site-unique connection for Tor
- if ip.endswith(".onion"):
- site_onion = self.tor_manager.getOnion(site.address)
- else:
- site_onion = self.tor_manager.getOnion("global")
- key = ip + site_onion
- else:
- key = ip
-
- # Find connection by ip
- if key in self.ips:
- connection = self.ips[key]
- if not peer_id or connection.handshake.get("peer_id") == peer_id: # Filter by peer_id
- if not connection.connected and create:
- succ = connection.event_connected.get() # Wait for connection
- if not succ:
- raise Exception("Connection event return error")
- return connection
-
- # Recover from connection pool
- for connection in self.connections:
- if connection.ip == ip:
- if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match
- continue
- if ip.endswith(".onion") and self.tor_manager.start_onions and ip.replace(".onion", "") != connection.target_onion:
- # For different site
- continue
- if not connection.connected and create:
- succ = connection.event_connected.get() # Wait for connection
- if not succ:
- raise Exception("Connection event return error")
- return connection
-
- # No connection found
- if create and not config.offline: # Allow creating a new connection if none found
- if port == 0:
- raise Exception("This peer is not connectable")
-
- if (ip, port) in self.peer_blacklist and not is_tracker_connection:
- raise Exception("This peer is blacklisted")
-
- try:
- if has_per_site_onion: # Lock connection to site
- connection = Connection(self, ip, port, target_onion=site_onion, is_tracker_connection=is_tracker_connection)
- else:
- connection = Connection(self, ip, port, is_tracker_connection=is_tracker_connection)
- self.num_outgoing += 1
- self.ips[key] = connection
- self.connections.append(connection)
- connection.log("Connecting... (site: %s)" % site)
- succ = connection.connect()
- if not succ:
- connection.close("Connection event return error")
- raise Exception("Connection event return error")
-
- except Exception as err:
- connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
- raise err
-
- if len(self.connections) > config.global_connected_limit:
- gevent.spawn(self.checkMaxConnections)
-
- return connection
- else:
- return None
-
- def removeConnection(self, connection):
- # Delete if same as in registry
- if self.ips.get(connection.ip) == connection:
- del self.ips[connection.ip]
- # Site locked connection
- if connection.target_onion:
- if self.ips.get(connection.ip + connection.target_onion) == connection:
- del self.ips[connection.ip + connection.target_onion]
- # Cert pinned connection
- if connection.cert_pin and self.ips.get(connection.ip + "#" + connection.cert_pin) == connection:
- del self.ips[connection.ip + "#" + connection.cert_pin]
-
- if connection in self.connections:
- self.connections.remove(connection)
-
- def checkConnections(self):
- run_i = 0
- time.sleep(15)
- while self.running:
- run_i += 1
- self.ip_incoming = {} # Reset connected ips counter
- last_message_time = 0
- s = time.time()
- for connection in self.connections[:]: # Make a copy
- if connection.ip.endswith(".onion") or config.tor == "always":
- timeout_multipler = 2
- else:
- timeout_multipler = 1
-
- idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
- if connection.last_message_time > last_message_time and not connection.is_private_ip:
- # Messages from local IPs do not indicate internet connectivity
- last_message_time = connection.last_message_time
-
- if connection.unpacker and idle > 30:
- # Delete the unpacker if not needed
- del connection.unpacker
- connection.unpacker = None
-
- elif connection.last_cmd_sent == "announce" and idle > 20: # Close bootstrapper connections after 20 sec idle
- connection.close("[Cleanup] Tracker connection, idle: %.3fs" % idle)
-
- if idle > 60 * 60:
- # Close connections idle for more than 1 hour (e.g., after machine wakeup)
- connection.close("[Cleanup] After wakeup, idle: %.3fs" % idle)
-
- elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
- # Idle for more than 20 min and no request sent in the last 10 sec
- if not connection.ping():
- connection.close("[Cleanup] Ping timeout")
-
- elif idle > 10 * timeout_multipler and connection.incomplete_buff_recv > 0:
- # Incomplete data with more than 10 sec idle
- connection.close("[Cleanup] Connection buff stalled")
-
- elif idle > 10 * timeout_multipler and connection.protocol == "?": # No connection after 10 sec
- connection.close(
- "[Cleanup] Connect timeout: %.3fs" % idle
- )
-
- elif idle > 10 * timeout_multipler and connection.waiting_requests and time.time() - connection.last_send_time > 10 * timeout_multipler:
- # Sent command and no response in 10 sec
- connection.close(
- "[Cleanup] Command %s timeout: %.3fs" % (connection.last_cmd_sent, time.time() - connection.last_send_time)
- )
-
- elif idle < 60 and connection.bad_actions > 40:
- connection.close(
- "[Cleanup] Too many bad actions: %s" % connection.bad_actions
- )
-
- elif idle > 5 * 60 and connection.sites == 0:
- connection.close(
- "[Cleanup] No site for connection"
- )
-
- elif run_i % 90 == 0:
- # Reset bad action counter every 90 cycles (~22 min at the 15 sec check interval)
- connection.bad_actions = 0
-
- # Internet outage detection
- if time.time() - last_message_time > max(60, 60 * 10 / max(1, float(len(self.connections)) / 50)):
- # Offline: Last message more than 60-600sec depending on connection number
- if self.has_internet and last_message_time:
- self.has_internet = False
- self.onInternetOffline()
- else:
- # Online
- if not self.has_internet:
- self.has_internet = True
- self.onInternetOnline()
-
- self.timecorrection = self.getTimecorrection()
-
- if time.time() - s > 0.01:
- self.log.debug("Connection cleanup in %.3fs" % (time.time() - s))
-
- time.sleep(15)
- self.log.debug("Checkconnections ended")
-
- @util.Noparallel(blocking=False)
- def checkMaxConnections(self):
- if len(self.connections) < config.global_connected_limit:
- return 0
-
- s = time.time()
- num_connected_before = len(self.connections)
- self.connections.sort(key=lambda connection: connection.sites)
- num_closed = 0
- for connection in self.connections:
- idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
- if idle > 60:
- connection.close("Connection limit reached")
- num_closed += 1
- if num_closed > config.global_connected_limit * 0.1:
- break
-
- self.log.debug("Closed %s connections of %s after reached limit %s in %.3fs" % (
- num_closed, num_connected_before, config.global_connected_limit, time.time() - s
- ))
- return num_closed
-
- def onInternetOnline(self):
- self.log.info("Internet online")
-
- def onInternetOffline(self):
- self.had_external_incoming = False
- self.log.info("Internet offline")
-
- def getTimecorrection(self):
- corrections = sorted([
- connection.handshake.get("time") - connection.handshake_time + connection.last_ping_delay
- for connection in self.connections
- if connection.handshake.get("time") and connection.last_ping_delay
- ])
- if len(corrections) < 9:
- return 0.0
- mid = int(len(corrections) / 2 - 1)
- median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
- return median
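
The getTimecorrection() method deleted above estimates the local clock's skew from peer handshakes: it sorts the per-connection corrections, requires at least 9 samples, and averages the three middle values, a median-style estimator that shrugs off outlier peers. A minimal standalone sketch of that logic (plain floats stand in for the handshake-derived corrections; an illustration, not ZeroNet API):

    # Sketch of the median-of-three time correction; inputs are assumed
    # to be (peer_time - local_time + ping_delay) samples in seconds.
    def time_correction(corrections):
        corrections = sorted(corrections)
        if len(corrections) < 9:  # Too few samples for a stable estimate
            return 0.0
        mid = int(len(corrections) / 2 - 1)
        return (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3

    print(time_correction([0.1, -0.2, 0.0, 0.3, 0.1, 0.2, -0.1, 0.05, 0.15]))  # -> 0.05
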
diff --git a/src/Connection/__init__.py b/src/Connection/__init__.py
deleted file mode 100644
index d419a3f0..00000000
--- a/src/Connection/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .ConnectionServer import ConnectionServer
-from .Connection import Connection
diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py
deleted file mode 100644
index f284581e..00000000
--- a/src/Content/ContentDb.py
+++ /dev/null
@@ -1,162 +0,0 @@
-import os
-
-from Db.Db import Db, DbTableError
-from Config import config
-from Plugin import PluginManager
-from Debug import Debug
-
-
-@PluginManager.acceptPlugins
-class ContentDb(Db):
- def __init__(self, path):
- Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path)
- self.foreign_keys = True
-
- def init(self):
- try:
- self.schema = self.getSchema()
- try:
- self.checkTables()
- except DbTableError:
- pass
- self.log.debug("Checking foreign keys...")
- foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
- if foreign_key_error:
- raise Exception("Database foreign key error: %s" % foreign_key_error)
- except Exception as err:
- self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
- self.close()
- os.unlink(self.db_path) # Remove and try again
- Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path)
- self.foreign_keys = True
- self.schema = self.getSchema()
- try:
- self.checkTables()
- except DbTableError:
- pass
- self.site_ids = {}
- self.sites = {}
-
- def getSchema(self):
- schema = {}
- schema["db_name"] = "ContentDb"
- schema["version"] = 3
- schema["tables"] = {}
-
- if not self.getTableVersion("site"):
- self.log.debug("Migrating from table version-less content.db")
- version = int(self.execute("PRAGMA user_version").fetchone()[0])
- if version > 0:
- self.checkTables()
- self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.site.version", "value": 1})
- self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.content.version", "value": 1})
-
- schema["tables"]["site"] = {
- "cols": [
- ["site_id", "INTEGER PRIMARY KEY ASC NOT NULL UNIQUE"],
- ["address", "TEXT NOT NULL"]
- ],
- "indexes": [
- "CREATE UNIQUE INDEX site_address ON site (address)"
- ],
- "schema_changed": 1
- }
-
- schema["tables"]["content"] = {
- "cols": [
- ["content_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"],
- ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
- ["inner_path", "TEXT"],
- ["size", "INTEGER"],
- ["size_files", "INTEGER"],
- ["size_files_optional", "INTEGER"],
- ["modified", "INTEGER"]
- ],
- "indexes": [
- "CREATE UNIQUE INDEX content_key ON content (site_id, inner_path)",
- "CREATE INDEX content_modified ON content (site_id, modified)"
- ],
- "schema_changed": 1
- }
-
- return schema
-
- def initSite(self, site):
- self.sites[site.address] = site
-
- def needSite(self, site):
- if site.address not in self.site_ids:
- self.execute("INSERT OR IGNORE INTO site ?", {"address": site.address})
- self.site_ids = {}
- for row in self.execute("SELECT * FROM site"):
- self.site_ids[row["address"]] = row["site_id"]
- return self.site_ids[site.address]
-
- def deleteSite(self, site):
- site_id = self.site_ids.get(site.address, 0)
- if site_id:
- self.execute("DELETE FROM site WHERE site_id = :site_id", {"site_id": site_id})
- del self.site_ids[site.address]
- del self.sites[site.address]
-
- def setContent(self, site, inner_path, content, size=0):
- self.insertOrUpdate("content", {
- "size": size,
- "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
- "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
- "modified": int(content.get("modified", 0))
- }, {
- "site_id": self.site_ids.get(site.address, 0),
- "inner_path": inner_path
- })
-
- def deleteContent(self, site, inner_path):
- self.execute("DELETE FROM content WHERE ?", {"site_id": self.site_ids.get(site.address, 0), "inner_path": inner_path})
-
- def loadDbDict(self, site):
- res = self.execute(
- "SELECT GROUP_CONCAT(inner_path, '|') AS inner_paths FROM content WHERE ?",
- {"site_id": self.site_ids.get(site.address, 0)}
- )
- row = res.fetchone()
- if row and row["inner_paths"]:
- inner_paths = row["inner_paths"].split("|")
- return dict.fromkeys(inner_paths, False)
- else:
- return {}
-
- def getTotalSize(self, site, ignore=None):
- params = {"site_id": self.site_ids.get(site.address, 0)}
- if ignore:
- params["not__inner_path"] = ignore
- res = self.execute("SELECT SUM(size) + SUM(size_files) AS size, SUM(size_files_optional) AS size_optional FROM content WHERE ?", params)
- row = dict(res.fetchone())
-
- if not row["size"]:
- row["size"] = 0
- if not row["size_optional"]:
- row["size_optional"] = 0
-
- return row["size"], row["size_optional"]
-
- def listModified(self, site, after=None, before=None):
- params = {"site_id": self.site_ids.get(site.address, 0)}
- if after:
- params["modified>"] = after
- if before:
- params["modified<"] = before
- res = self.execute("SELECT inner_path, modified FROM content WHERE ?", params)
- return {row["inner_path"]: row["modified"] for row in res}
-
-content_dbs = {}
-
-
-def getContentDb(path=None):
- if not path:
- path = "%s/content.db" % config.data_dir
- if path not in content_dbs:
- content_dbs[path] = ContentDb(path)
- content_dbs[path].init()
- return content_dbs[path]
-
-getContentDb() # Pre-connect to default one
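
loadDbDict() above pulls every known inner_path for a site in one round trip using SQLite's GROUP_CONCAT, then maps each path to False as a "known but not loaded" marker for ContentDbDict (next hunk). A self-contained sketch of the same query against the stdlib sqlite3 module; the schema is reduced to the two columns involved, and a plain parameterized query replaces the Db wrapper's `WHERE ?` expansion:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE content (site_id INTEGER, inner_path TEXT);
        INSERT INTO content VALUES (1, 'content.json'), (1, 'data/users/content.json');
    """)
    row = conn.execute(
        "SELECT GROUP_CONCAT(inner_path, '|') AS inner_paths FROM content WHERE site_id = ?",
        (1,),
    ).fetchone()
    # Same post-processing as loadDbDict(): every known path maps to False
    lazy_keys = dict.fromkeys(row[0].split("|"), False) if row and row[0] else {}
    print(lazy_keys)  # {'content.json': False, 'data/users/content.json': False}
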
diff --git a/src/Content/ContentDbDict.py b/src/Content/ContentDbDict.py
deleted file mode 100644
index 01df0427..00000000
--- a/src/Content/ContentDbDict.py
+++ /dev/null
@@ -1,155 +0,0 @@
-import time
-import os
-
-from . import ContentDb
-from Debug import Debug
-from Config import config
-
-
-class ContentDbDict(dict):
- def __init__(self, site, *args, **kwargs):
- s = time.time()
- self.site = site
- self.cached_keys = []
- self.log = self.site.log
- self.db = ContentDb.getContentDb()
- self.db_id = self.db.needSite(site)
- self.num_loaded = 0
- super(ContentDbDict, self).__init__(self.db.loadDbDict(site)) # Load keys from database
- self.log.debug("ContentDb init: %.3fs, found files: %s, sites: %s" % (time.time() - s, len(self), len(self.db.site_ids)))
-
- def loadItem(self, key):
- try:
- self.num_loaded += 1
- if self.num_loaded % 100 == 0:
- if config.verbose:
- self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack()))
- else:
- self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key))
- content = self.site.storage.loadJson(key)
- dict.__setitem__(self, key, content)
- except IOError:
- if dict.get(self, key):
- self.__delitem__(key) # File no longer exists
- raise KeyError(key)
-
- self.addCachedKey(key)
- self.checkLimit()
-
- return content
-
- def getItemSize(self, key):
- return self.site.storage.getSize(key)
-
- # Only keep the last 10 accessed json files in memory
- def checkLimit(self):
- if len(self.cached_keys) > 10:
- key_deleted = self.cached_keys.pop(0)
- dict.__setitem__(self, key_deleted, False)
-
- def addCachedKey(self, key):
- if key not in self.cached_keys and key != "content.json" and len(key) > 40: # Always keep keys smaller than 40 char
- self.cached_keys.append(key)
-
- def __getitem__(self, key):
- val = dict.get(self, key)
- if val: # Already loaded
- return val
- elif val is None: # Unknown key
- raise KeyError(key)
- elif val is False: # Loaded before, but purged from cache
- return self.loadItem(key)
-
- def __setitem__(self, key, val):
- self.addCachedKey(key)
- self.checkLimit()
- size = self.getItemSize(key)
- self.db.setContent(self.site, key, val, size)
- dict.__setitem__(self, key, val)
-
- def __delitem__(self, key):
- self.db.deleteContent(self.site, key)
- dict.__delitem__(self, key)
- try:
- self.cached_keys.remove(key)
- except ValueError:
- pass
-
- def iteritems(self):
- for key in dict.keys(self):
- try:
- val = self[key]
- except Exception as err:
- self.log.warning("Error loading %s: %s" % (key, err))
- continue
- yield key, val
-
- def items(self):
- back = []
- for key in dict.keys(self):
- try:
- val = self[key]
- except Exception as err:
- self.log.warning("Error loading %s: %s" % (key, err))
- continue
- back.append((key, val))
- return back
-
- def values(self):
- back = []
- for key, val in dict.items(self): # dict.iteritems() does not exist in Python 3
- if not val:
- try:
- val = self.loadItem(key)
- except Exception:
- continue
- back.append(val)
- return back
-
- def get(self, key, default=None):
- try:
- return self.__getitem__(key)
- except KeyError:
- return default
- except Exception as err:
- self.site.bad_files[key] = self.site.bad_files.get(key, 1)
- dict.__delitem__(self, key)
- self.log.warning("Error loading %s: %s" % (key, err))
- return default
-
- def execute(self, query, params={}):
- params["site_id"] = self.db_id
- return self.db.execute(query, params)
-
-if __name__ == "__main__":
- import psutil
- process = psutil.Process(os.getpid())
- s_mem = process.memory_info()[0] / float(2 ** 20)
- root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
- contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
- print("Init len", len(contents))
-
- s = time.time()
- for dir_name in os.listdir(root + "/data/users/")[0:8000]:
- contents["data/users/%s/content.json" % dir_name]
- print("Load: %.3fs" % (time.time() - s))
-
- s = time.time()
- found = 0
- for key, val in contents.items():
- found += 1
- assert key
- assert val
- print("Found:", found)
- print("Iteritem: %.3fs" % (time.time() - s))
-
- s = time.time()
- found = 0
- for key in list(contents.keys()):
- found += 1
- assert key in contents
- print("In: %.3fs" % (time.time() - s))
-
- print("Len:", len(list(contents.values())), len(list(contents.keys())))
-
- print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)
diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py
deleted file mode 100644
index 27da402b..00000000
--- a/src/Content/ContentManager.py
+++ /dev/null
@@ -1,1045 +0,0 @@
-import json
-import time
-import re
-import os
-import copy
-import base64
-import sys
-
-import gevent
-
-from Debug import Debug
-from Crypt import CryptHash
-from Config import config
-from util import helper
-from util import Diff
-from util import SafeRe
-from Peer import PeerHashfield
-from .ContentDbDict import ContentDbDict
-from Plugin import PluginManager
-
-
-class VerifyError(Exception):
- pass
-
-
-class SignError(Exception):
- pass
-
-
-@PluginManager.acceptPlugins
-class ContentManager(object):
-
- def __init__(self, site):
- self.site = site
- self.log = self.site.log
- self.contents = ContentDbDict(site)
- self.hashfield = PeerHashfield()
- self.has_optional_files = False
-
- # Load all content.json files
- def loadContents(self):
- if len(self.contents) == 0:
- self.log.info("ContentDb not initialized, load files from filesystem...")
- self.loadContent(add_bad_files=False, delete_removed_files=False)
- self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize()
-
- # Load hashfield cache
- if "hashfield" in self.site.settings.get("cache", {}):
- self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"]))
- del self.site.settings["cache"]["hashfield"]
- elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0:
- self.site.storage.updateBadFiles() # No hashfield cache created yet
- self.has_optional_files = bool(self.hashfield)
-
- self.contents.db.initSite(self.site)
-
- def getFileChanges(self, old_files, new_files):
- deleted = {key: val for key, val in old_files.items() if key not in new_files}
- deleted_hashes = {val.get("sha512"): key for key, val in old_files.items() if key not in new_files}
- added = {key: val for key, val in new_files.items() if key not in old_files}
- renamed = {}
- for relative_path, node in added.items():
- hash = node.get("sha512")
- if hash in deleted_hashes:
- relative_path_old = deleted_hashes[hash]
- renamed[relative_path_old] = relative_path
- del(deleted[relative_path_old])
- return list(deleted), renamed
-
- # Load content.json to self.content
- # Return: Changed files ["index.html", "data/messages.json"], Deleted files ["old.jpg"]
- def loadContent(self, content_inner_path="content.json", add_bad_files=True, delete_removed_files=True, load_includes=True, force=False):
- content_inner_path = content_inner_path.strip("/") # Remove / from beginning
- old_content = self.contents.get(content_inner_path)
- content_path = self.site.storage.getPath(content_inner_path)
- content_dir = helper.getDirname(self.site.storage.getPath(content_inner_path))
- content_inner_dir = helper.getDirname(content_inner_path)
-
- if os.path.isfile(content_path):
- try:
- # Check if file is newer than what we have
- if not force and old_content and not self.site.settings.get("own"):
- for line in open(content_path):
- if '"modified"' not in line:
- continue
- match = re.search(r"([0-9\.]+),$", line.strip(" \r\n"))
- if match and float(match.group(1)) <= old_content.get("modified", 0):
- self.log.debug("%s loadContent same json file, skipping" % content_inner_path)
- return [], []
-
- new_content = self.site.storage.loadJson(content_inner_path)
- except Exception as err:
- self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err)))
- return [], []
- else:
- self.log.debug("Content.json not exist: %s" % content_path)
- return [], [] # Content.json not exist
-
- try:
- # Get the files where the sha512 changed
- changed = []
- deleted = []
- # Check changed
- for relative_path, info in new_content.get("files", {}).items():
- if "sha512" in info:
- hash_type = "sha512"
- else: # Backward compatibility
- hash_type = "sha1"
-
- new_hash = info[hash_type]
- if old_content and old_content["files"].get(relative_path): # We have the file in the old content
- old_hash = old_content["files"][relative_path].get(hash_type)
- else: # The file is not in the old content
- old_hash = None
- if old_hash != new_hash:
- changed.append(content_inner_dir + relative_path)
-
- # Check changed optional files
- for relative_path, info in new_content.get("files_optional", {}).items():
- file_inner_path = content_inner_dir + relative_path
- new_hash = info["sha512"]
- if old_content and old_content.get("files_optional", {}).get(relative_path):
- # We have the file in the old content
- old_hash = old_content["files_optional"][relative_path].get("sha512")
- if old_hash != new_hash and self.site.isDownloadable(file_inner_path):
- changed.append(file_inner_path) # Download new file
- elif old_hash != new_hash and self.hashfield.hasHash(old_hash) and not self.site.settings.get("own"):
- try:
- old_hash_id = self.hashfield.getHashId(old_hash)
- self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"])
- self.optionalDelete(file_inner_path)
- self.log.debug("Deleted changed optional file: %s" % file_inner_path)
- except Exception as err:
- self.log.warning("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
- else: # The file is not in the old content
- if self.site.isDownloadable(file_inner_path):
- changed.append(file_inner_path) # Download new file
-
- # Check deleted
- if old_content:
- old_files = dict(
- old_content.get("files", {}),
- **old_content.get("files_optional", {})
- )
-
- new_files = dict(
- new_content.get("files", {}),
- **new_content.get("files_optional", {})
- )
-
- deleted, renamed = self.getFileChanges(old_files, new_files)
-
- for relative_path_old, relative_path_new in renamed.items():
- self.log.debug("Renaming: %s -> %s" % (relative_path_old, relative_path_new))
- if relative_path_new in new_content.get("files_optional", {}):
- self.optionalRenamed(content_inner_dir + relative_path_old, content_inner_dir + relative_path_new)
- if self.site.storage.isFile(relative_path_old):
- try:
- self.site.storage.rename(relative_path_old, relative_path_new)
- if relative_path_new in changed:
- changed.remove(relative_path_new)
- self.log.debug("Renamed: %s -> %s" % (relative_path_old, relative_path_new))
- except Exception as err:
- self.log.warning("Error renaming file: %s -> %s %s" % (relative_path_old, relative_path_new, err))
-
- if deleted and not self.site.settings.get("own"):
- # Deleting files that are no longer in content.json
- for file_relative_path in deleted:
- file_inner_path = content_inner_dir + file_relative_path
- try:
- # Check if the deleted file is optional
- if old_content.get("files_optional") and old_content["files_optional"].get(file_relative_path):
- self.optionalDelete(file_inner_path)
- old_hash = old_content["files_optional"][file_relative_path].get("sha512")
- if self.hashfield.hasHash(old_hash):
- old_hash_id = self.hashfield.getHashId(old_hash)
- self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][file_relative_path]["size"])
- else:
- self.site.storage.delete(file_inner_path)
-
- self.log.debug("Deleted file: %s" % file_inner_path)
- except Exception as err:
- self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err)))
-
- # Cleanup empty dirs
- tree = {root: [dirs, files] for root, dirs, files in os.walk(self.site.storage.getPath(content_inner_dir))}
- for root in sorted(tree, key=len, reverse=True):
- dirs, files = tree[root]
- if dirs == [] and files == []:
- root_inner_path = self.site.storage.getInnerPath(root.replace("\\", "/"))
- self.log.debug("Empty directory: %s, cleaning up." % root_inner_path)
- try:
- self.site.storage.deleteDir(root_inner_path)
- # Remove from tree dict to reflect changed state
- tree[os.path.dirname(root)][0].remove(os.path.basename(root))
- except Exception as err:
- self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err))
-
- # Check archived
- if old_content and "user_contents" in new_content and "archived" in new_content["user_contents"]:
- old_archived = old_content.get("user_contents", {}).get("archived", {})
- new_archived = new_content.get("user_contents", {}).get("archived", {})
- self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived)))
- archived_changed = {
- key: date_archived
- for key, date_archived in new_archived.items()
- if old_archived.get(key) != new_archived[key]
- }
- if archived_changed:
- self.log.debug("Archived changed: %s" % archived_changed)
- for archived_dirname, date_archived in archived_changed.items():
- archived_inner_path = content_inner_dir + archived_dirname + "/content.json"
- if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived:
- self.removeContent(archived_inner_path)
- deleted += [archived_inner_path] # Append the path itself; bare += on a string would append its characters
- self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize()
-
- # Check archived before
- if old_content and "user_contents" in new_content and "archived_before" in new_content["user_contents"]:
- old_archived_before = old_content.get("user_contents", {}).get("archived_before", 0)
- new_archived_before = new_content.get("user_contents", {}).get("archived_before", 0)
- if old_archived_before != new_archived_before:
- self.log.debug("Archived before changed: %s -> %s" % (old_archived_before, new_archived_before))
-
- # Remove downloaded archived files
- num_removed_contents = 0
- for archived_inner_path in self.listModified(before=new_archived_before):
- if archived_inner_path.startswith(content_inner_dir) and archived_inner_path != content_inner_path:
- self.removeContent(archived_inner_path)
- num_removed_contents += 1
- self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize()
-
- # Remove archived files from download queue
- num_removed_bad_files = 0
- for bad_file in list(self.site.bad_files.keys()):
- if bad_file.endswith("content.json"):
- del self.site.bad_files[bad_file]
- num_removed_bad_files += 1
-
- if num_removed_bad_files > 0:
- self.site.worker_manager.removeSolvedFileTasks(mark_as_good=False)
- gevent.spawn(self.site.update, since=0)
-
- self.log.debug("Archived removed contents: %s, removed bad files: %s" % (num_removed_contents, num_removed_bad_files))
-
- # Load includes
- if load_includes and "includes" in new_content:
- for relative_path, info in list(new_content["includes"].items()):
- include_inner_path = content_inner_dir + relative_path
- if self.site.storage.isFile(include_inner_path): # Content.json exists, load it
- include_changed, include_deleted = self.loadContent(
- include_inner_path, add_bad_files=add_bad_files, delete_removed_files=delete_removed_files
- )
- if include_changed:
- changed += include_changed # Add changed files
- if include_deleted:
- deleted += include_deleted # Add deleted files
- else: # content.json does not exist, add it to changed files
- self.log.debug("Missing include: %s" % include_inner_path)
- changed += [include_inner_path]
-
- # Load blind user includes (all subdir)
- if load_includes and "user_contents" in new_content:
- for relative_dir in os.listdir(content_dir):
- include_inner_path = content_inner_dir + relative_dir + "/content.json"
- if not self.site.storage.isFile(include_inner_path):
- continue # content.json does not exist
- include_changed, include_deleted = self.loadContent(
- include_inner_path, add_bad_files=add_bad_files, delete_removed_files=delete_removed_files,
- load_includes=False
- )
- if include_changed:
- changed += include_changed # Add changed files
- if include_deleted:
- deleted += include_deleted # Add deleted files
-
- # Save some memory
- new_content["signs"] = None
- if "cert_sign" in new_content:
- new_content["cert_sign"] = None
-
- if new_content.get("files_optional"):
- self.has_optional_files = True
- # Update the content
- self.contents[content_inner_path] = new_content
- except Exception as err:
- self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err)))
- return [], [] # Content.json parse error
-
- # Add changed files to bad files
- if add_bad_files:
- for inner_path in changed:
- self.site.bad_files[inner_path] = self.site.bad_files.get(inner_path, 0) + 1
- for inner_path in deleted:
- if inner_path in self.site.bad_files:
- del self.site.bad_files[inner_path]
- self.site.worker_manager.removeSolvedFileTasks()
-
- if new_content.get("modified", 0) > self.site.settings.get("modified", 0):
- # Don't store modification times more than 10 minutes in the future
- self.site.settings["modified"] = min(time.time() + 60 * 10, new_content["modified"])
-
- return changed, deleted
-
- def removeContent(self, inner_path):
- inner_dir = helper.getDirname(inner_path)
- try:
- content = self.contents[inner_path]
- files = dict(
- content.get("files", {}),
- **content.get("files_optional", {})
- )
- except Exception as err:
- self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err)))
- files = {}
- files["content.json"] = True
- # Deleting files that are no longer in content.json
- for file_relative_path in files:
- file_inner_path = inner_dir + file_relative_path
- try:
- self.site.storage.delete(file_inner_path)
- self.log.debug("Deleted file: %s" % file_inner_path)
- except Exception as err:
- self.log.debug("Error deleting file %s: %s" % (file_inner_path, err))
- try:
- self.site.storage.deleteDir(inner_dir)
- except Exception as err:
- self.log.debug("Error deleting dir %s: %s" % (inner_dir, err))
-
- try:
- del self.contents[inner_path]
- except Exception as err:
- self.log.debug("Error key from contents: %s" % inner_path)
-
- # Get total size of site
- # Return: (size, size_optional) of files in bytes, e.g. (32819, 0)
- def getTotalSize(self, ignore=None):
- return self.contents.db.getTotalSize(self.site, ignore)
-
- def listModified(self, after=None, before=None):
- return self.contents.db.listModified(self.site, after=after, before=before)
-
- def listContents(self, inner_path="content.json", user_files=False):
- if inner_path not in self.contents:
- return []
- back = [inner_path]
- content_inner_dir = helper.getDirname(inner_path)
- for relative_path in list(self.contents[inner_path].get("includes", {}).keys()):
- include_inner_path = content_inner_dir + relative_path
- back += self.listContents(include_inner_path)
- return back
-
- # Return whether the file with the given modification date is archived
- def isArchived(self, inner_path, modified):
- match = re.match(r"(.*)/(.*?)/", inner_path)
- if not match:
- return False
- user_contents_inner_path = match.group(1) + "/content.json"
- relative_directory = match.group(2)
-
- file_info = self.getFileInfo(user_contents_inner_path)
- if file_info:
- time_archived_before = file_info.get("archived_before", 0)
- time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0)
- if modified <= time_archived_before or modified <= time_directory_archived:
- return True
- else:
- return False
- else:
- return False
-
- def isDownloaded(self, inner_path, hash_id=None):
- if not hash_id:
- file_info = self.getFileInfo(inner_path)
- if not file_info or "sha512" not in file_info:
- return False
- hash_id = self.hashfield.getHashId(file_info["sha512"])
- return hash_id in self.hashfield
-
- # Is modified since signing
- def isModified(self, inner_path):
- s = time.time()
- if inner_path.endswith("content.json"):
- try:
- is_valid = self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False)
- if is_valid:
- is_modified = False
- else:
- is_modified = True
- except VerifyError:
- is_modified = True
- else:
- try:
- self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False)
- is_modified = False
- except VerifyError:
- is_modified = True
- return is_modified
-
- # Find the file info line from self.contents
- # Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"}
- def getFileInfo(self, inner_path, new_file=False):
- dirs = inner_path.split("/") # Parent dirs of content.json
- inner_path_parts = [dirs.pop()] # Filename relative to content.json
- while True:
- content_inner_path = "%s/content.json" % "/".join(dirs)
- content_inner_path = content_inner_path.strip("/")
- content = self.contents.get(content_inner_path)
-
- # Check in files
- if content and "files" in content:
- back = content["files"].get("/".join(inner_path_parts))
- if back:
- back["content_inner_path"] = content_inner_path
- back["optional"] = False
- back["relative_path"] = "/".join(inner_path_parts)
- return back
-
- # Check in optional files
- if content and "files_optional" in content: # Check if file in this content.json
- back = content["files_optional"].get("/".join(inner_path_parts))
- if back:
- back["content_inner_path"] = content_inner_path
- back["optional"] = True
- back["relative_path"] = "/".join(inner_path_parts)
- return back
-
- # Return the rules if user dir
- if content and "user_contents" in content:
- back = content["user_contents"]
- content_inner_path_dir = helper.getDirname(content_inner_path)
- relative_content_path = inner_path[len(content_inner_path_dir):]
- user_auth_address_match = re.match(r"([A-Za-z0-9]+)/.*", relative_content_path)
- if user_auth_address_match:
- user_auth_address = user_auth_address_match.group(1)
- back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address)
- else:
- back["content_inner_path"] = content_inner_path_dir + "content.json"
- back["optional"] = None
- back["relative_path"] = "/".join(inner_path_parts)
- return back
-
- if new_file and content:
- back = {}
- back["content_inner_path"] = content_inner_path
- back["relative_path"] = "/".join(inner_path_parts)
- back["optional"] = None
- return back
-
- # No inner path in this dir, let's try the parent dir
- if dirs:
- inner_path_parts.insert(0, dirs.pop())
- else: # No more parent dirs
- break
-
- # Not found
- return False
-
- # Get rules for the file
- # Return: The rules for the file or False if not allowed
- def getRules(self, inner_path, content=None):
- if not inner_path.endswith("content.json"): # Find the files content.json first
- file_info = self.getFileInfo(inner_path)
- if not file_info:
- return False # File not found
- inner_path = file_info["content_inner_path"]
-
- if inner_path == "content.json": # Root content.json
- rules = {}
- rules["signers"] = self.getValidSigners(inner_path, content)
- return rules
-
- dirs = inner_path.split("/") # Parent dirs of content.json
- inner_path_parts = [dirs.pop()] # Filename relative to content.json
- inner_path_parts.insert(0, dirs.pop()) # Don't check in the file's own dir
- while True:
- content_inner_path = "%s/content.json" % "/".join(dirs)
- parent_content = self.contents.get(content_inner_path.strip("/"))
- if parent_content and "includes" in parent_content:
- return parent_content["includes"].get("/".join(inner_path_parts))
- elif parent_content and "user_contents" in parent_content:
- return self.getUserContentRules(parent_content, inner_path, content)
- else: # No inner path in this dir, let's try the parent dir
- if dirs:
- inner_path_parts.insert(0, dirs.pop())
- else: # No more parent dirs
- break
-
- return False
-
- # Get rules for a user file
- # Return: The rules of the file or False if not allowed
- def getUserContentRules(self, parent_content, inner_path, content):
- user_contents = parent_content["user_contents"]
-
- # Delivered for directory
- if "inner_path" in parent_content:
- parent_content_dir = helper.getDirname(parent_content["inner_path"])
- user_address = re.match(r"([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1)
- else:
- user_address = re.match(r".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1)
-
- try:
- if not content:
- content = self.site.storage.loadJson(inner_path) # Read the file if no content specified
- user_urn = "%s/%s" % (content["cert_auth_type"], content["cert_user_id"]) # web/nofish@zeroid.bit
- cert_user_id = content["cert_user_id"]
- except Exception: # content.json does not exist
- user_urn = "n-a/n-a"
- cert_user_id = "n-a"
-
- if user_address in user_contents["permissions"]:
- rules = copy.copy(user_contents["permissions"].get(user_address, {})) # Default rules based on address
- else:
- rules = copy.copy(user_contents["permissions"].get(cert_user_id, {})) # Default rules based on username
-
- if rules is False:
- banned = True
- rules = {}
- else:
- banned = False
- if "signers" in rules:
- rules["signers"] = rules["signers"][:] # Make copy of the signers
- for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()): # Regexp rules
- if not SafeRe.match(permission_pattern, user_urn):
- continue # Rule is not valid for user
- # Update rules if they're better than the currently recorded ones
- for key, val in permission_rules.items():
- if key not in rules:
- if type(val) is list:
- rules[key] = val[:] # Make copy
- else:
- rules[key] = val
- elif type(val) is int: # Int, update if larger
- if val > rules[key]:
- rules[key] = val
- elif hasattr(val, "startswith"): # String, update if longer
- if len(val) > len(rules[key]):
- rules[key] = val
- elif type(val) is list: # List, append
- rules[key] += val
-
- # Accepted cert signers
- rules["cert_signers"] = user_contents.get("cert_signers", {})
- rules["cert_signers_pattern"] = user_contents.get("cert_signers_pattern")
-
- if "signers" not in rules:
- rules["signers"] = []
-
- if not banned:
- rules["signers"].append(user_address) # Add user as valid signer
- rules["user_address"] = user_address
- rules["includes_allowed"] = False
-
- return rules
-
- # Get diffs for changed files
- def getDiffs(self, inner_path, limit=30 * 1024, update_files=True):
- if inner_path not in self.contents:
- return {}
- diffs = {}
- content_inner_path_dir = helper.getDirname(inner_path)
- for file_relative_path in self.contents[inner_path].get("files", {}):
- file_inner_path = content_inner_path_dir + file_relative_path
- if self.site.storage.isFile(file_inner_path + "-new"): # New version present
- diffs[file_relative_path] = Diff.diff(
- list(self.site.storage.open(file_inner_path)),
- list(self.site.storage.open(file_inner_path + "-new")),
- limit=limit
- )
- if update_files:
- self.site.storage.delete(file_inner_path)
- self.site.storage.rename(file_inner_path + "-new", file_inner_path)
- if self.site.storage.isFile(file_inner_path + "-old"): # Old version present
- diffs[file_relative_path] = Diff.diff(
- list(self.site.storage.open(file_inner_path + "-old")),
- list(self.site.storage.open(file_inner_path)),
- limit=limit
- )
- if update_files:
- self.site.storage.delete(file_inner_path + "-old")
- return diffs
-
- def hashFile(self, dir_inner_path, file_relative_path, optional=False):
- back = {}
- file_inner_path = dir_inner_path + "/" + file_relative_path
-
- file_path = self.site.storage.getPath(file_inner_path)
- file_size = os.path.getsize(file_path)
- sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file
- if optional and not self.hashfield.hasHash(sha512sum):
- self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(sha512sum), file_size, own=True)
-
- back[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
- return back
-
- def isValidRelativePath(self, relative_path):
- if ".." in relative_path.replace("\\", "/").split("/"):
- return False
- elif len(relative_path) > 255:
- return False
- elif relative_path[0] in ("/", "\\"): # Starts with
- return False
- elif relative_path[-1] in (".", " "): # Ends with
- return False
- elif re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE): # Protected on Windows
- return False
- else:
- return re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path)
-
- def sanitizePath(self, inner_path):
- return re.sub("[\x00-\x1F\"*:<>?\\|]", "", inner_path)
-
- # Hash files in directory
- def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None):
- files_node = {}
- files_optional_node = {}
- db_inner_path = self.site.storage.getDbFile()
- if dir_inner_path and not self.isValidRelativePath(dir_inner_path):
- ignored = True
- self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path)
-
- for file_relative_path in self.site.storage.walk(dir_inner_path, ignore_pattern):
- file_name = helper.getFilename(file_relative_path)
-
- ignored = optional = False
- if file_name == "content.json":
- ignored = True
- elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"):
- ignored = True
- elif not self.isValidRelativePath(file_relative_path):
- ignored = True
- self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path)
- elif dir_inner_path == "" and db_inner_path and file_relative_path.startswith(db_inner_path):
- ignored = True
- elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path):
- optional = True
-
- if ignored: # Ignore content.json, defined regexp and files starting with .
- self.log.info("- [SKIPPED] %s" % file_relative_path)
- else:
- if optional:
- self.log.info("- [OPTIONAL] %s" % file_relative_path)
- files_optional_node.update(
- self.hashFile(dir_inner_path, file_relative_path, optional=True)
- )
- else:
- self.log.info("- %s" % file_relative_path)
- files_node.update(
- self.hashFile(dir_inner_path, file_relative_path)
- )
- return files_node, files_optional_node
-
- # Create and sign a content.json
- # Return: The new content if filewrite = False
- def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None, remove_missing_optional=False):
- if not inner_path.endswith("content.json"):
- raise SignError("Invalid file name, you can only sign content.json files")
-
- if inner_path in self.contents:
- content = self.contents.get(inner_path)
- if content and content.get("cert_sign", False) is None and self.site.storage.isFile(inner_path):
- # Recover cert_sign from file
- content["cert_sign"] = self.site.storage.loadJson(inner_path).get("cert_sign")
- else:
- content = None
- if not content: # Content does not exist yet, load default one
- self.log.info("File %s does not exist yet, loading default values..." % inner_path)
-
- if self.site.storage.isFile(inner_path):
- content = self.site.storage.loadJson(inner_path)
- if "files" not in content:
- content["files"] = {}
- if "signs" not in content:
- content["signs"] = {}
- else:
- content = {"files": {}, "signs": {}} # Default content.json
-
- if inner_path == "content.json": # It's the root content.json, add some more fields
- content["title"] = "%s - ZeroNet_" % self.site.address
- content["description"] = ""
- content["signs_required"] = 1
- content["ignore"] = ""
-
- if extend:
- # Add extend keys if they don't exist yet
- for key, val in list(extend.items()):
- if not content.get(key):
- content[key] = val
- self.log.info("Extending content.json with: %s" % key)
-
- directory = helper.getDirname(self.site.storage.getPath(inner_path))
- inner_directory = helper.getDirname(inner_path)
- self.log.info("Opening site data directory: %s..." % directory)
-
- changed_files = [inner_path]
- files_node, files_optional_node = self.hashFiles(
- helper.getDirname(inner_path), content.get("ignore"), content.get("optional")
- )
-
- if not remove_missing_optional:
- for file_inner_path, file_details in content.get("files_optional", {}).items():
- if file_inner_path not in files_optional_node:
- files_optional_node[file_inner_path] = file_details
-
- # Find changed files
- files_merged = files_node.copy()
- files_merged.update(files_optional_node)
- for file_relative_path, file_details in files_merged.items():
- old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512")
- new_hash = files_merged[file_relative_path]["sha512"]
- if old_hash != new_hash:
- changed_files.append(inner_directory + file_relative_path)
-
- self.log.debug("Changed files: %s" % changed_files)
- if update_changed_files:
- for file_path in changed_files:
- self.site.storage.onUpdated(file_path)
-
- # Generate new content.json
- self.log.info("Adding timestamp and sha512sums to new content.json...")
-
- new_content = content.copy() # Create a copy of current content.json
- new_content["files"] = files_node # Add files sha512 hash
- if files_optional_node:
- new_content["files_optional"] = files_optional_node
- elif "files_optional" in new_content:
- del new_content["files_optional"]
-
- new_content["modified"] = int(time.time()) # Add timestamp
- if inner_path == "content.json":
- new_content["zeronet_version"] = config.version
- new_content["signs_required"] = content.get("signs_required", 1)
-
- new_content["address"] = self.site.address
- new_content["inner_path"] = inner_path
-
- # Verify private key
- from Crypt import CryptBitcoin
- self.log.info("Verifying private key...")
- privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey)
- valid_signers = self.getValidSigners(inner_path, new_content)
- if privatekey_address not in valid_signers:
- raise SignError(
- "Private key invalid! Valid signers: %s, Private key address: %s" %
- (valid_signers, privatekey_address)
- )
- self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers))
-
- if inner_path == "content.json" and privatekey_address == self.site.address:
- # If signing using the root key, then sign the valid signers
- signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers))
- new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey)
- if not new_content["signers_sign"]:
- self.log.info("Old style address, signers_sign is none")
-
- self.log.info("Signing %s..." % inner_path)
-
- if "signs" in new_content:
- del(new_content["signs"]) # Delete old signs
- if "sign" in new_content:
- del(new_content["sign"]) # Delete old sign (backward compatibility)
-
- sign_content = json.dumps(new_content, sort_keys=True)
- sign = CryptBitcoin.sign(sign_content, privatekey)
- # new_content["signs"] = content.get("signs", {}) # TODO: Multisig
- if sign: # If signing is successful (not an old address)
- new_content["signs"] = {}
- new_content["signs"][privatekey_address] = sign
-
- self.verifyContent(inner_path, new_content)
-
- if filewrite:
- self.log.info("Saving to %s..." % inner_path)
- self.site.storage.writeJson(inner_path, new_content)
- self.contents[inner_path] = new_content
-
- self.log.info("File %s signed!" % inner_path)
-
- if filewrite: # Written to file
- return True
- else: # Return the new content
- return new_content
-
- # The valid signers of content.json file
- # Return: ["1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6", "13ReyhCsjhpuCVahn1DHdf6eMqqEVev162"]
- def getValidSigners(self, inner_path, content=None):
- valid_signers = []
- if inner_path == "content.json": # Root content.json
- if "content.json" in self.contents and "signers" in self.contents["content.json"]:
- valid_signers += self.contents["content.json"]["signers"][:]
- else:
- rules = self.getRules(inner_path, content)
- if rules and "signers" in rules:
- valid_signers += rules["signers"]
-
- if self.site.address not in valid_signers:
- valid_signers.append(self.site.address) # Site address always valid
- return valid_signers
-
- # Return: The required number of valid signs for the content.json
- def getSignsRequired(self, inner_path, content=None):
- return 1 # Todo: Multisig
-
- def verifyCertSign(self, user_address, user_auth_type, user_name, issuer_address, sign):
- from Crypt import CryptBitcoin
- cert_subject = "%s#%s/%s" % (user_address, user_auth_type, user_name)
- return CryptBitcoin.verify(cert_subject, issuer_address, sign)
-
- def verifyCert(self, inner_path, content):
- rules = self.getRules(inner_path, content)
-
- if not rules:
- raise VerifyError("No rules for this file")
-
- if not rules.get("cert_signers") and not rules.get("cert_signers_pattern"):
- return True # Does not need cert
-
- if "cert_user_id" not in content:
- raise VerifyError("Missing cert_user_id")
-
- if content["cert_user_id"].count("@") != 1:
- raise VerifyError("Invalid domain in cert_user_id")
-
- name, domain = content["cert_user_id"].rsplit("@", 1)
- cert_address = rules["cert_signers"].get(domain)
- if not cert_address: # Unknown Cert signer
- if rules.get("cert_signers_pattern") and SafeRe.match(rules["cert_signers_pattern"], domain):
- cert_address = domain
- else:
- raise VerifyError("Invalid cert signer: %s" % domain)
-
- return self.verifyCertSign(rules["user_address"], content["cert_auth_type"], name, cert_address, content["cert_sign"])
-
- # Checks if the content.json content is valid
- # Return: True or False
- def verifyContent(self, inner_path, content):
- content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0]) # Size of new content
- # Calculate old content size
- old_content = self.contents.get(inner_path)
- if old_content:
- old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())])
- old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())])
- else:
- old_content_size = 0
- old_content_size_optional = 0
-
- # Reset site size on first content.json
- if not old_content and inner_path == "content.json":
- self.site.settings["size"] = 0
-
- content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0])
- site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new
- site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new
-
- site_size_limit = self.site.getSizeLimit() * 1024 * 1024
-
- # Check site address
- if content.get("address") and content["address"] != self.site.address:
- raise VerifyError("Wrong site address: %s != %s" % (content["address"], self.site.address))
-
- # Check file inner path
- if content.get("inner_path") and content["inner_path"] != inner_path:
- raise VerifyError("Wrong inner_path: %s" % content["inner_path"])
-
- # If our content.json file is bigger than the size limit, raise an error
- if inner_path == "content.json":
- content_size_file = len(json.dumps(content, indent=1))
- if content_size_file > site_size_limit:
- # Save site size to display warning
- self.site.settings["size"] = site_size
- task = self.site.worker_manager.tasks.findTask(inner_path)
- if task: # Don't try to download from other peers
- self.site.worker_manager.failTask(task)
- raise VerifyError("Content too large %s B > %s B, aborting task..." % (site_size, site_size_limit))
-
- # Verify valid filenames
- for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
- if not self.isValidRelativePath(file_relative_path):
- raise VerifyError("Invalid relative path: %s" % file_relative_path)
-
- if inner_path == "content.json":
- self.site.settings["size"] = site_size
- self.site.settings["size_optional"] = site_size_optional
- return True # Root content.json is passed
- else:
- if self.verifyContentInclude(inner_path, content, content_size, content_size_optional):
- self.site.settings["size"] = site_size
- self.site.settings["size_optional"] = site_size_optional
- return True
- else:
- raise VerifyError("Content verify error")
-
- def verifyContentInclude(self, inner_path, content, content_size, content_size_optional):
- # Load include details
- rules = self.getRules(inner_path, content)
- if not rules:
- raise VerifyError("No rules")
-
- # Check include size limit
- if rules.get("max_size") is not None: # Include size limit
- if content_size > rules["max_size"]:
- raise VerifyError("Include too large %sB > %sB" % (content_size, rules["max_size"]))
-
- if rules.get("max_size_optional") is not None: # Include optional files limit
- if content_size_optional > rules["max_size_optional"]:
- raise VerifyError("Include optional files too large %sB > %sB" % (
- content_size_optional, rules["max_size_optional"])
- )
-
- # Filename limit
- if rules.get("files_allowed"):
- for file_inner_path in list(content["files"].keys()):
- if not SafeRe.match(r"^%s$" % rules["files_allowed"], file_inner_path):
- raise VerifyError("File not allowed: %s" % file_inner_path)
-
- if rules.get("files_allowed_optional"):
- for file_inner_path in list(content.get("files_optional", {}).keys()):
- if not SafeRe.match(r"^%s$" % rules["files_allowed_optional"], file_inner_path):
- raise VerifyError("Optional file not allowed: %s" % file_inner_path)
-
- # Check if content includes allowed
- if rules.get("includes_allowed") is False and content.get("includes"):
- raise VerifyError("Includes not allowed")
-
- return True # All good
-
- # Verify file validity
- # Return: None = Same as before, False = Invalid, True = Valid
- def verifyFile(self, inner_path, file, ignore_same=True):
- if inner_path.endswith("content.json"): # content.json: Check using sign
- from Crypt import CryptBitcoin
- try:
- if type(file) is dict:
- new_content = file
- else:
- try:
- if sys.version_info.major == 3 and sys.version_info.minor < 6:
- new_content = json.loads(file.read().decode("utf8"))
- else:
- new_content = json.load(file)
- except Exception as err:
- raise VerifyError("Invalid json file: %s" % err)
- if inner_path in self.contents:
- old_content = self.contents.get(inner_path, {"modified": 0})
- # Check if it's newer than ours
- if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json
- return None
- elif old_content["modified"] > new_content["modified"]: # We have newer
- raise VerifyError(
- "We have newer (Our: %s, Sent: %s)" %
- (old_content["modified"], new_content["modified"])
- )
- if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+)
- raise VerifyError("Modify timestamp is in the far future!")
- if self.isArchived(inner_path, new_content["modified"]):
- if inner_path in self.site.bad_files:
- del self.site.bad_files[inner_path]
- raise VerifyError("This file is archived!")
- # Check sign
- sign = new_content.get("sign")
- signs = new_content.get("signs", {})
- if "sign" in new_content:
- del(new_content["sign"]) # The file signed without the sign
- if "signs" in new_content:
- del(new_content["signs"]) # The file signed without the signs
-
- sign_content = json.dumps(new_content, sort_keys=True) # Dump the json to string to remove whitespace
-
- # Fix float representation error on Android
- modified = new_content["modified"]
- if config.fix_float_decimals and type(modified) is float and not str(modified).endswith(".0"):
- modified_fixed = "{:.6f}".format(modified).strip("0.")
- sign_content = sign_content.replace(
- '"modified": %s' % repr(modified),
- '"modified": %s' % modified_fixed
- )
-
- if signs: # New style signing
- valid_signers = self.getValidSigners(inner_path, new_content)
- signs_required = self.getSignsRequired(inner_path, new_content)
-
- if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json
- signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
- if not CryptBitcoin.verify(signers_data, self.site.address, new_content["signers_sign"]):
- raise VerifyError("Invalid signers_sign!")
-
- if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid
- raise VerifyError("Invalid cert!")
-
- valid_signs = 0
- for address in valid_signers:
- if address in signs:
- valid_signs += CryptBitcoin.verify(sign_content, address, signs[address])
- if valid_signs >= signs_required:
- break # Break if we have enough signs
- if valid_signs < signs_required:
- raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required))
- else:
- return self.verifyContent(inner_path, new_content)
- else: # Old style signing
- raise VerifyError("Invalid old-style sign")
-
- except Exception as err:
- self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
- raise err
-
- else: # Check using sha512 hash
- file_info = self.getFileInfo(inner_path)
- if file_info:
- if CryptHash.sha512sum(file) != file_info.get("sha512", ""):
- raise VerifyError("Invalid hash")
-
- if file_info.get("size", 0) != file.tell():
- raise VerifyError(
- "File size does not match %s <> %s" %
- (inner_path, file.tell(), file_info.get("size", 0))
- )
-
- return True
-
- else: # File not in content.json
- raise VerifyError("File not in content.json")
-
- def optionalDelete(self, inner_path):
- self.site.storage.delete(inner_path)
-
- def optionalDownloaded(self, inner_path, hash_id, size=None, own=False):
- if size is None:
- size = self.site.storage.getSize(inner_path)
-
- done = self.hashfield.appendHashId(hash_id)
- self.site.settings["optional_downloaded"] += size
- return done
-
- def optionalRemoved(self, inner_path, hash_id, size=None):
- if size is None:
- size = self.site.storage.getSize(inner_path)
- done = self.hashfield.removeHashId(hash_id)
-
- self.site.settings["optional_downloaded"] -= size
- return done
-
- def optionalRenamed(self, inner_path_old, inner_path_new):
- return True
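A minimal standalone sketch of the "N of M" threshold check performed in verifyFile above; verify_fn stands in for CryptBitcoin.verify, and all names here are illustrative rather than part of the codebase:

def countValidSigns(sign_content, signs, valid_signers, signs_required, verify_fn):
    # Count signatures from allowed signers until the threshold is reached
    valid_signs = 0
    for address in valid_signers:
        if address not in signs:
            continue
        valid_signs += bool(verify_fn(sign_content, address, signs[address]))
        if valid_signs >= signs_required:
            return True  # Stop early once we have enough signs
    return False

# Example with a fake verifier that accepts every signature:
assert countValidSigns("data", {"1A": "sig"}, ["1A", "1B"], 1, lambda d, a, s: True)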
diff --git a/src/Content/__init__.py b/src/Content/__init__.py
deleted file mode 100644
index fbbd39f4..00000000
--- a/src/Content/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .ContentManager import ContentManager
\ No newline at end of file
diff --git a/src/Crypt/Crypt.py b/src/Crypt/Crypt.py
deleted file mode 100644
index 7d7d3659..00000000
--- a/src/Crypt/Crypt.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from Config import config
-from util import ThreadPool
-
-thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt)
\ No newline at end of file
diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py
deleted file mode 100644
index 68b2caa2..00000000
--- a/src/Crypt/CryptBitcoin.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import logging
-import base64
-import binascii
-import time
-import hashlib
-
-from util.Electrum import dbl_format
-from Config import config
-
-import util.OpensslFindPatch
-
-lib_verify_best = "sslcrypto"
-
-from lib import sslcrypto
-sslcurve_native = sslcrypto.ecc.get_curve("secp256k1")
-sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1")
-sslcurve = sslcurve_native
-
-def loadLib(lib_name, silent=False):
- global sslcurve, libsecp256k1message, lib_verify_best
- if lib_name == "libsecp256k1":
- s = time.time()
- from lib import libsecp256k1message
- import coincurve
- lib_verify_best = "libsecp256k1"
- if not silent:
- logging.info(
- "Libsecpk256k1 loaded: %s in %.3fs" %
- (type(coincurve._libsecp256k1.lib).__name__, time.time() - s)
- )
- elif lib_name == "sslcrypto":
- sslcurve = sslcurve_native
- if sslcurve_native == sslcurve_fallback:
- logging.warning("SSLCurve fallback loaded instead of native")
- elif lib_name == "sslcrypto_fallback":
- sslcurve = sslcurve_fallback
-
-try:
- if not config.use_libsecp256k1:
- raise Exception("Disabled by config")
- loadLib("libsecp256k1")
- lib_verify_best = "libsecp256k1"
-except Exception as err:
- logging.info("Libsecp256k1 load failed: %s" % err)
-
-
-def newPrivatekey(): # Return new private key
- return sslcurve.private_to_wif(sslcurve.new_private_key()).decode()
-
-
-def newSeed():
- return binascii.hexlify(sslcurve.new_private_key()).decode()
-
-
-def hdPrivatekey(seed, child):
- # A too-large child id could cause problems
- privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000)
- return sslcurve.private_to_wif(privatekey_bin).decode()
-
-
-def privatekeyToAddress(privatekey): # Return address from private key
- try:
- if len(privatekey) == 64:
- privatekey_bin = bytes.fromhex(privatekey)
- else:
- privatekey_bin = sslcurve.wif_to_private(privatekey.encode())
- return sslcurve.private_to_address(privatekey_bin).decode()
- except Exception: # Invalid privatekey
- return False
-
-
-def sign(data, privatekey): # Return sign to data using private key
- if privatekey.startswith("23") and len(privatekey) > 52:
- return None # Old style private key not supported
- return base64.b64encode(sslcurve.sign(
- data.encode(),
- sslcurve.wif_to_private(privatekey.encode()),
- recoverable=True,
- hash=dbl_format
- )).decode()
-
-
-def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and sign
- if not lib_verify:
- lib_verify = lib_verify_best
-
- if not sign:
- return False
-
- if lib_verify == "libsecp256k1":
- sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8")
- elif lib_verify in ("sslcrypto", "sslcrypto_fallback"):
- publickey = sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format)
- sign_address = sslcurve.public_to_address(publickey).decode()
- else:
- raise Exception("No library enabled for signature verification")
-
- if type(valid_address) is list: # Any address in the list
- return sign_address in valid_address
- else: # One possible address
- return sign_address == valid_address
diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py
deleted file mode 100644
index ebbc6295..00000000
--- a/src/Crypt/CryptConnection.py
+++ /dev/null
@@ -1,217 +0,0 @@
-import sys
-import logging
-import os
-import ssl
-import hashlib
-import random
-
-from Config import config
-from util import helper
-
-
-class CryptConnectionManager:
- def __init__(self):
- if config.openssl_bin_file:
- self.openssl_bin = config.openssl_bin_file
- elif sys.platform.startswith("win"):
- self.openssl_bin = "tools\\openssl\\openssl.exe"
- elif config.dist_type.startswith("bundle_linux"):
- self.openssl_bin = "../runtime/bin/openssl"
- else:
- self.openssl_bin = "openssl"
-
- self.context_client = None
- self.context_server = None
-
- self.openssl_conf_template = "src/lib/openssl/openssl.cnf"
- self.openssl_conf = config.data_dir + "/openssl.cnf"
-
- self.openssl_env = {
- "OPENSSL_CONF": self.openssl_conf,
- "RANDFILE": config.data_dir + "/openssl-rand.tmp"
- }
-
- self.crypt_supported = [] # Supported cryptos
-
- self.cacert_pem = config.data_dir + "/cacert-rsa.pem"
- self.cakey_pem = config.data_dir + "/cakey-rsa.pem"
- self.cert_pem = config.data_dir + "/cert-rsa.pem"
- self.cert_csr = config.data_dir + "/cert-rsa.csr"
- self.key_pem = config.data_dir + "/key-rsa.pem"
-
- self.log = logging.getLogger("CryptConnectionManager")
- self.log.debug("Version: %s" % ssl.OPENSSL_VERSION)
-
- self.fakedomains = [
- "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com",
- "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com",
- "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp",
- "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com",
- "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com",
- "nazwa.pl", "symantec.com"
- ]
-
- def createSslContexts(self):
- if self.context_server and self.context_client:
- return False
- ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:"
- ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
-
- if hasattr(ssl, "PROTOCOL_TLS"):
- protocol = ssl.PROTOCOL_TLS
- else:
- protocol = ssl.PROTOCOL_TLSv1_2
- self.context_client = ssl.SSLContext(protocol)
- self.context_client.check_hostname = False
- self.context_client.verify_mode = ssl.CERT_NONE
-
- self.context_server = ssl.SSLContext(protocol)
- self.context_server.load_cert_chain(self.cert_pem, self.key_pem)
-
- for ctx in (self.context_client, self.context_server):
- ctx.set_ciphers(ciphers)
- ctx.options |= ssl.OP_NO_COMPRESSION
- try:
- ctx.set_alpn_protocols(["h2", "http/1.1"])
- ctx.set_npn_protocols(["h2", "http/1.1"])
- except Exception:
- pass
-
- # Select a crypt supported by both sides
- # Return: Name of the crypto
- def selectCrypt(self, client_supported):
- for crypt in self.crypt_supported:
- if crypt in client_supported:
- return crypt
- return False
-
- # Wrap socket for crypt
- # Return: wrapped socket
- def wrapSocket(self, sock, crypt, server=False, cert_pin=None):
- if crypt == "tls-rsa":
- if server:
- sock_wrapped = self.context_server.wrap_socket(sock, server_side=True)
- else:
- sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains))
- if cert_pin:
- cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest()
- if cert_hash != cert_pin:
- raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin))
- return sock_wrapped
- else:
- return sock
-
- def removeCerts(self):
- if config.keep_ssl_cert:
- return False
- for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]:
- file_path = "%s/%s" % (config.data_dir, file_name)
- if os.path.isfile(file_path):
- os.unlink(file_path)
-
- # Load and create cert files if necessary
- def loadCerts(self):
- if config.disable_encryption:
- return False
-
- if self.createSslRsaCert() and "tls-rsa" not in self.crypt_supported:
- self.crypt_supported.append("tls-rsa")
-
- # Try to create RSA server cert + sign for connection encryption
- # Return: True on success
- def createSslRsaCert(self):
- casubjects = [
- "/C=US/O=Amazon/OU=Server CA 1B/CN=Amazon",
- "/C=US/O=Let's Encrypt/CN=Let's Encrypt Authority X3",
- "/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA",
- "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA"
- ]
- self.openssl_env['CN'] = random.choice(self.fakedomains)
-
- if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
- self.createSslContexts()
- return True # Files already exist
-
- import subprocess
-
- # Replace variables in config template
- conf_template = open(self.openssl_conf_template).read()
- conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN'])
- open(self.openssl_conf, "w").write(conf_template)
-
- # Generate CAcert and CAkey
- cmd_params = helper.shellquote(
- self.openssl_bin,
- self.openssl_conf,
- random.choice(casubjects),
- self.cakey_pem,
- self.cacert_pem
- )
- cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params
- self.log.debug("Generating RSA CAcert and CAkey PEM files...")
- self.log.debug("Running: %s" % cmd)
- proc = subprocess.Popen(
- cmd, shell=True, stderr=subprocess.STDOUT,
- stdout=subprocess.PIPE, env=self.openssl_env
- )
- back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
- proc.wait()
-
- if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)):
- self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. (%s)" % back)
- return False
- else:
- self.log.debug("Result: %s" % back)
-
- # Generate certificate key and signing request
- cmd_params = helper.shellquote(
- self.openssl_bin,
- self.key_pem,
- self.cert_csr,
- "/CN=" + self.openssl_env['CN'],
- self.openssl_conf,
- )
- cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params
- self.log.debug("Generating certificate key and signing request...")
- proc = subprocess.Popen(
- cmd, shell=True, stderr=subprocess.STDOUT,
- stdout=subprocess.PIPE, env=self.openssl_env
- )
- back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
- proc.wait()
- self.log.debug("Running: %s\n%s" % (cmd, back))
-
- # Sign request and generate certificate
- cmd_params = helper.shellquote(
- self.openssl_bin,
- self.cert_csr,
- self.cacert_pem,
- self.cakey_pem,
- self.cert_pem,
- self.openssl_conf
- )
- cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params
- self.log.debug("Generating RSA cert...")
- proc = subprocess.Popen(
- cmd, shell=True, stderr=subprocess.STDOUT,
- stdout=subprocess.PIPE, env=self.openssl_env
- )
- back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
- proc.wait()
- self.log.debug("Running: %s\n%s" % (cmd, back))
-
- if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
- self.createSslContexts()
-
- # Remove no longer necessary files
- os.unlink(self.openssl_conf)
- os.unlink(self.cacert_pem)
- os.unlink(self.cakey_pem)
- os.unlink(self.cert_csr)
-
- return True
- else:
- self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.")
-
-
-manager = CryptConnectionManager()
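A self-contained sketch of the certificate pinning done in wrapSocket() above: the peer's DER certificate is hashed with SHA-256 and compared against the expected pin (the function name is illustrative):

import hashlib
import ssl

def checkCertPin(sock_wrapped: ssl.SSLSocket, cert_pin: str):
    # getpeercert(True) returns the peer certificate in DER (binary) form
    cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest()
    if cert_hash != cert_pin:
        raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin))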
diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py
deleted file mode 100644
index f5901fb8..00000000
--- a/src/Crypt/CryptHash.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import hashlib
-import os
-import base64
-
-
-def sha512sum(file, blocksize=65536, format="hexdigest"):
- if type(file) is str: # Filename specified
- file = open(file, "rb")
- hash = hashlib.sha512()
- for block in iter(lambda: file.read(blocksize), b""):
- hash.update(block)
-
- # Truncating to 256 bits is good enough
- if format == "hexdigest":
- return hash.hexdigest()[0:64]
- else:
- return hash.digest()[0:32]
-
-
-def sha256sum(file, blocksize=65536):
- if type(file) is str: # Filename specified
- file = open(file, "rb")
- hash = hashlib.sha256()
- for block in iter(lambda: file.read(blocksize), b""):
- hash.update(block)
- return hash.hexdigest()
-
-
-def random(length=64, encoding="hex"):
- if encoding == "base64": # Characters: A-Za-z0-9
- hash = hashlib.sha512(os.urandom(256)).digest()
- return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
- else: # Characters: a-f0-9 (faster)
- return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
-
-
-# Sha512 truncated to 256bits
-class Sha512t:
- def __init__(self, data):
- if data:
- self.sha512 = hashlib.sha512(data)
- else:
- self.sha512 = hashlib.sha512()
-
- def hexdigest(self):
- return self.sha512.hexdigest()[0:64]
-
- def digest(self):
- return self.sha512.digest()[0:32]
-
- def update(self, data):
- return self.sha512.update(data)
-
-
-def sha512t(data=None):
- return Sha512t(data)
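Usage sketch (assumes it runs in this module's context, so sha512t is in scope): sha512t() mirrors hashlib.sha512() but truncates the digest to 256 bits, matching the default "hexdigest" output of sha512sum() above.

import hashlib

h = sha512t(b"hello ")
h.update(b"world")
assert h.hexdigest() == hashlib.sha512(b"hello world").hexdigest()[0:64]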
diff --git a/src/Crypt/CryptRsa.py b/src/Crypt/CryptRsa.py
deleted file mode 100644
index 494c4d24..00000000
--- a/src/Crypt/CryptRsa.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import base64
-import hashlib
-
-def sign(data, privatekey):
- import rsa
- from rsa import pkcs1
-
- if "BEGIN RSA PRIVATE KEY" not in privatekey:
- privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey
-
- priv = rsa.PrivateKey.load_pkcs1(privatekey)
- sign = rsa.pkcs1.sign(data, priv, 'SHA-256')
- return sign
-
-def verify(data, publickey, sign):
- import rsa
- from rsa import pkcs1
-
- pub = rsa.PublicKey.load_pkcs1(publickey, format="DER")
- try:
- valid = rsa.pkcs1.verify(data, sign, pub)
- except pkcs1.VerificationError:
- valid = False
- return valid
-
-def privatekeyToPublickey(privatekey):
- import rsa
- from rsa import pkcs1
-
- if "BEGIN RSA PRIVATE KEY" not in privatekey:
- privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey
-
- priv = rsa.PrivateKey.load_pkcs1(privatekey)
- pub = rsa.PublicKey(priv.n, priv.e)
- return pub.save_pkcs1("DER")
-
-def publickeyToOnion(publickey):
- return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
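For reference, publickeyToOnion() implements the legacy v2 onion naming scheme: the address is the first 80 bits of the SHA-1 of the service's DER public key, base32-encoded. A standalone illustration with a hypothetical helper name:

import base64
import hashlib

def onionFromPublickey(publickey):
    # 10 bytes = 80 bits, which base32-encodes to exactly 16 characters
    return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")

print(onionFromPublickey(b"example-der-public-key"))  # 16 base32 characters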
diff --git a/src/Crypt/__init__.py b/src/Crypt/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/src/Db/Db.py b/src/Db/Db.py
deleted file mode 100644
index d1d9ce15..00000000
--- a/src/Db/Db.py
+++ /dev/null
@@ -1,519 +0,0 @@
-import sqlite3
-import json
-import time
-import logging
-import re
-import os
-import atexit
-import threading
-import sys
-import weakref
-import errno
-
-import gevent
-
-from Debug import Debug
-from .DbCursor import DbCursor
-from util import SafeRe
-from util import helper
-from util import ThreadPool
-from Config import config
-
-thread_pool_db = ThreadPool.ThreadPool(config.threads_db)
-
-next_db_id = 0
-opened_dbs = []
-
-
-# Close idle databases to save some memory
-def dbCleanup():
- while 1:
- time.sleep(60 * 5)
- for db in opened_dbs[:]:
- idle = time.time() - db.last_query_time
- if idle > 60 * 5 and db.close_idle:
- db.close("Cleanup")
-
-
-def dbCommitCheck():
- while 1:
- time.sleep(5)
- for db in opened_dbs[:]:
- if not db.need_commit:
- continue
-
- success = db.commit("Interval")
- if success:
- db.need_commit = False
- time.sleep(0.1)
-
-
-def dbCloseAll():
- for db in opened_dbs[:]:
- db.close("Close all")
-
-
-gevent.spawn(dbCleanup)
-gevent.spawn(dbCommitCheck)
-atexit.register(dbCloseAll)
-
-
-class DbTableError(Exception):
- def __init__(self, message, table):
- super().__init__(message)
- self.table = table
-
-
-class Db(object):
-
- def __init__(self, schema, db_path, close_idle=False):
- global next_db_id
- self.db_path = db_path
- self.db_dir = os.path.dirname(db_path) + "/"
- self.schema = schema
- self.schema["version"] = self.schema.get("version", 1)
- self.conn = None
- self.cur = None
- self.cursors = weakref.WeakSet()
- self.id = next_db_id
- next_db_id += 1
- self.progress_sleeping = False
- self.commiting = False
- self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"]))
- self.table_names = None
- self.collect_stats = False
- self.foreign_keys = False
- self.need_commit = False
- self.query_stats = {}
- self.db_keyvalues = {}
- self.delayed_queue = []
- self.delayed_queue_thread = None
- self.close_idle = close_idle
- self.last_query_time = time.time()
- self.last_sleep_time = time.time()
- self.num_execute_since_sleep = 0
- self.lock = ThreadPool.Lock()
- self.connect_lock = ThreadPool.Lock()
-
- def __repr__(self):
- return "" % (id(self), self.db_path, self.close_idle)
-
- def connect(self):
- self.connect_lock.acquire(True)
- try:
- if self.conn:
- self.log.debug("Already connected, connection ignored")
- return
-
- if self not in opened_dbs:
- opened_dbs.append(self)
- s = time.time()
- try: # Directory may not exist yet
- os.makedirs(self.db_dir)
- self.log.debug("Created Db path: %s" % self.db_dir)
- except OSError as err:
- if err.errno != errno.EEXIST:
- raise err
- if not os.path.isfile(self.db_path):
- self.log.debug("Db file not exist yet: %s" % self.db_path)
- self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False)
- self.conn.row_factory = sqlite3.Row
- self.conn.set_progress_handler(self.progress, 5000000)
- self.conn.execute('PRAGMA journal_mode=WAL')
- if self.foreign_keys:
- self.conn.execute("PRAGMA foreign_keys = ON")
- self.cur = self.getCursor()
-
- self.log.debug(
- "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." %
- (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version)
- )
- self.log.debug("Connect by thread: %s" % threading.current_thread().ident)
- self.log.debug("Connect called by %s" % Debug.formatStack())
- finally:
- self.connect_lock.release()
-
- def getConn(self):
- if not self.conn:
- self.connect()
- return self.conn
-
- def progress(self, *args, **kwargs):
- self.progress_sleeping = True
- time.sleep(0.001)
- self.progress_sleeping = False
-
- # Execute query using dbcursor
- def execute(self, query, params=None):
- if not self.conn:
- self.connect()
- return self.cur.execute(query, params)
-
- @thread_pool_db.wrap
- def commit(self, reason="Unknown"):
- if self.progress_sleeping:
- self.log.debug("Commit ignored: Progress sleeping")
- return False
-
- if not self.conn:
- self.log.debug("Commit ignored: No connection")
- return False
-
- if self.commiting:
- self.log.debug("Commit ignored: Already commiting")
- return False
-
- try:
- s = time.time()
- self.commiting = True
- self.conn.commit()
- self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason))
- return True
- except Exception as err:
- if "SQL statements in progress" in str(err):
- self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason))
- else:
- self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason))
- return False
- finally:
- self.commiting = False
-
- def insertOrUpdate(self, *args, **kwargs):
- if not self.conn:
- self.connect()
- return self.cur.insertOrUpdate(*args, **kwargs)
-
- def executeDelayed(self, *args, **kwargs):
- if not self.delayed_queue_thread:
- self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed)
- self.delayed_queue.append(("execute", (args, kwargs)))
-
- def insertOrUpdateDelayed(self, *args, **kwargs):
- if not self.delayed_queue:
- gevent.spawn_later(1, self.processDelayed)
- self.delayed_queue.append(("insertOrUpdate", (args, kwargs)))
-
- def processDelayed(self):
- if not self.delayed_queue:
- self.log.debug("processDelayed aborted")
- return
- if not self.conn:
- self.connect()
-
- s = time.time()
- cur = self.getCursor()
- for command, params in self.delayed_queue:
- if command == "insertOrUpdate":
- cur.insertOrUpdate(*params[0], **params[1])
- else:
- cur.execute(*params[0], **params[1])
-
- if len(self.delayed_queue) > 10:
- self.log.debug("Processed %s delayed queue in %.3fs" % (len(self.delayed_queue), time.time() - s))
- self.delayed_queue = []
- self.delayed_queue_thread = None
-
- def close(self, reason="Unknown"):
- if not self.conn:
- return False
- self.connect_lock.acquire()
- s = time.time()
- if self.delayed_queue:
- self.processDelayed()
- if self in opened_dbs:
- opened_dbs.remove(self)
- self.need_commit = False
- self.commit("Closing: %s" % reason)
- self.log.debug("Close called by %s" % Debug.formatStack())
- for i in range(5):
- if len(self.cursors) == 0:
- break
- self.log.debug("Pending cursors: %s" % len(self.cursors))
- time.sleep(0.1 * i)
- if len(self.cursors):
- self.log.debug("Killing cursors: %s" % len(self.cursors))
- self.conn.interrupt()
-
- if self.cur:
- self.cur.close()
- if self.conn:
- ThreadPool.main_loop.call(self.conn.close)
- self.conn = None
- self.cur = None
- self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs)))
- self.connect_lock.release()
- return True
-
- # Gets a cursor object to database
- # Return: Cursor class
- def getCursor(self):
- if not self.conn:
- self.connect()
-
- cur = DbCursor(self)
- return cur
-
- def getSharedCursor(self):
- if not self.conn:
- self.connect()
- return self.cur
-
- # Get the table version
- # Return: Table version (0 if unknown), or False if the keyvalue table does not exist
- def getTableVersion(self, table_name):
- if not self.db_keyvalues: # Get db keyvalues
- try:
- res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
- except sqlite3.OperationalError as err: # Table does not exist
- self.log.debug("Query table version error: %s" % err)
- return False
-
- for row in res:
- self.db_keyvalues[row["key"]] = row["value"]
-
- return self.db_keyvalues.get("table.%s.version" % table_name, 0)
-
- # Check Db tables
- # Return: Changed table names
- def checkTables(self):
- s = time.time()
- changed_tables = []
-
- cur = self.getSharedCursor()
-
- # Check internal tables
- # Check keyvalue table
- changed = cur.needTable("keyvalue", [
- ["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["key", "TEXT"],
- ["value", "INTEGER"],
- ["json_id", "INTEGER"],
- ], [
- "CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
- ], version=self.schema["version"])
- if changed:
- changed_tables.append("keyvalue")
-
- # Create json table if no custom one defined
- if "json" not in self.schema.get("tables", {}):
- if self.schema["version"] == 1:
- changed = cur.needTable("json", [
- ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["path", "VARCHAR(255)"]
- ], [
- "CREATE UNIQUE INDEX path ON json(path)"
- ], version=self.schema["version"])
- elif self.schema["version"] == 2:
- changed = cur.needTable("json", [
- ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["directory", "VARCHAR(255)"],
- ["file_name", "VARCHAR(255)"]
- ], [
- "CREATE UNIQUE INDEX path ON json(directory, file_name)"
- ], version=self.schema["version"])
- elif self.schema["version"] == 3:
- changed = cur.needTable("json", [
- ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
- ["site", "VARCHAR(255)"],
- ["directory", "VARCHAR(255)"],
- ["file_name", "VARCHAR(255)"]
- ], [
- "CREATE UNIQUE INDEX path ON json(directory, site, file_name)"
- ], version=self.schema["version"])
- if changed:
- changed_tables.append("json")
-
- # Check schema tables
- for table_name, table_settings in self.schema.get("tables", {}).items():
- try:
- indexes = table_settings.get("indexes", [])
- version = table_settings.get("schema_changed", 0)
- changed = cur.needTable(
- table_name, table_settings["cols"],
- indexes, version=version
- )
- if changed:
- changed_tables.append(table_name)
- except Exception as err:
- self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err)))
- raise DbTableError(err, table_name)
-
- self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
- if changed_tables:
- self.db_keyvalues = {} # Refresh table version cache
-
- return changed_tables
-
- # Update a json file's content in the db
- # Return: True if it matched a map in the schema
- def updateJson(self, file_path, file=None, cur=None):
- if not file_path.startswith(self.db_dir):
- return False # Not from the db dir: Skipping
- relative_path = file_path[len(self.db_dir):] # File path relative to the db file
-
- # Check if filename matches any of mappings in schema
- matched_maps = []
- for match, map_settings in self.schema["maps"].items():
- try:
- if SafeRe.match(match, relative_path):
- matched_maps.append(map_settings)
- except SafeRe.UnsafePatternError as err:
- self.log.error(err)
-
- # No match found for the file
- if not matched_maps:
- return False
-
- # Load the json file
- try:
- if file is None: # Open the file if no file object was passed
- file = open(file_path, "rb")
-
- if file is False: # File deleted
- data = {}
- else:
- if file_path.endswith("json.gz"):
- file = helper.limitedGzipFile(fileobj=file)
-
- if sys.version_info.major == 3 and sys.version_info.minor < 6:
- data = json.loads(file.read().decode("utf8"))
- else:
- data = json.load(file)
- except Exception as err:
- self.log.debug("Json file %s load error: %s" % (file_path, err))
- data = {}
-
- # No cursor specified
- if not cur:
- cur = self.getSharedCursor()
- cur.logging = False
-
- # Row for current json file if required
- if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
- json_row = cur.getJsonRow(relative_path)
-
- # Check matched mappings in schema
- for dbmap in matched_maps:
- # Insert non-relational key values
- if dbmap.get("to_keyvalue"):
- # Get current values
- res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],))
- current_keyvalue = {}
- current_keyvalue_id = {}
- for row in res:
- current_keyvalue[row["key"]] = row["value"]
- current_keyvalue_id[row["key"]] = row["keyvalue_id"]
-
- for key in dbmap["to_keyvalue"]:
- if key not in current_keyvalue: # Keyvalue does not exist in the db yet
- cur.execute(
- "INSERT INTO keyvalue ?",
- {"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
- )
- elif data.get(key) != current_keyvalue[key]: # Keyvalue has a different value
- cur.execute(
- "UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?",
- (data.get(key), current_keyvalue_id[key])
- )
-
- # Insert data to json table for easier joins
- if dbmap.get("to_json_table"):
- directory, file_name = re.match("^(.*?)/*([^/]*)$", relative_path).groups()
- data_json_row = dict(cur.getJsonRow(directory + "/" + dbmap.get("file_name", file_name)))
- changed = False
- for key in dbmap["to_json_table"]:
- if data.get(key) != data_json_row.get(key):
- changed = True
- if changed:
- # Add the custom col values
- data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]})
- cur.execute("INSERT OR REPLACE INTO json ?", data_json_row)
-
- # Insert data to tables
- for table_settings in dbmap.get("to_table", []):
- if isinstance(table_settings, dict): # Custom settings
- table_name = table_settings["table"] # Table name to insert datas
- node = table_settings.get("node", table_name) # Node keyname in data json file
- key_col = table_settings.get("key_col") # Map dict key as this col
- val_col = table_settings.get("val_col") # Map dict value as this col
- import_cols = table_settings.get("import_cols")
- replaces = table_settings.get("replaces")
- else: # Simple settings
- table_name = table_settings
- node = table_settings
- key_col = None
- val_col = None
- import_cols = None
- replaces = None
-
- # Fill import cols from table cols
- if not import_cols:
- import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]])
-
- cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
-
- if node not in data:
- continue
-
- if key_col: # Map as dict
- for key, val in data[node].items():
- if val_col: # Single value
- cur.execute(
- "INSERT OR REPLACE INTO %s ?" % table_name,
- {key_col: key, val_col: val, "json_id": json_row["json_id"]}
- )
- else: # Multi value
- if type(val) is dict: # Single row
- row = val
- if import_cols:
- row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols
- row[key_col] = key
- # Replace in value if necessary
- if replaces:
- for replace_key, replace in replaces.items():
- if replace_key in row:
- for replace_from, replace_to in replace.items():
- row[replace_key] = row[replace_key].replace(replace_from, replace_to)
-
- row["json_id"] = json_row["json_id"]
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
- elif type(val) is list: # Multi row
- for row in val:
- row[key_col] = key
- row["json_id"] = json_row["json_id"]
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
- else: # Map as list
- for row in data[node]:
- row["json_id"] = json_row["json_id"]
- if import_cols:
- row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols
- cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
-
- # Cleanup json row
- if not data:
- self.log.debug("Cleanup json row for %s" % file_path)
- cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"])
-
- return True
-
-
-if __name__ == "__main__":
- s = time.time()
- console_log = logging.StreamHandler()
- logging.getLogger('').setLevel(logging.DEBUG)
- logging.getLogger('').addHandler(console_log)
- console_log.setLevel(logging.DEBUG)
- dbjson = Db(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
- dbjson.collect_stats = True
- dbjson.checkTables()
- cur = dbjson.getCursor()
- cur.logging = False
- dbjson.updateJson("data/users/content.json", cur=cur)
- for user_dir in os.listdir("data/users"):
- if os.path.isdir("data/users/%s" % user_dir):
- dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur)
- # print ".",
- cur.logging = True
- print("Done in %.3fs" % (time.time() - s))
- for query, stats in sorted(dbjson.query_stats.items()):
- print("-", query, stats)
diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py
deleted file mode 100644
index acb8846d..00000000
--- a/src/Db/DbCursor.py
+++ /dev/null
@@ -1,246 +0,0 @@
-import time
-import re
-from util import helper
-
-# Special sqlite cursor
-
-
-class DbCursor:
-
- def __init__(self, db):
- self.db = db
- self.logging = False
-
- def quoteValue(self, value):
- if type(value) is int:
- return str(value)
- else:
- return "'%s'" % value.replace("'", "''")
-
- def parseQuery(self, query, params):
- query_type = query.split(" ", 1)[0].upper()
- if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params
- if query_type in ("SELECT", "DELETE", "UPDATE"):
- # Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? format
- query_wheres = []
- values = []
- for key, value in params.items():
- if type(value) is list:
- if key.startswith("not__"):
- field = key.replace("not__", "")
- operator = "NOT IN"
- else:
- field = key
- operator = "IN"
- if len(value) > 100:
- # Embed values in query to avoid "too many SQL variables" error
- query_values = ",".join(map(helper.sqlquote, value))
- else:
- query_values = ",".join(["?"] * len(value))
- values += value
- query_wheres.append(
- "%s %s (%s)" %
- (field, operator, query_values)
- )
- else:
- if key.startswith("not__"):
- query_wheres.append(key.replace("not__", "") + " != ?")
- elif key.endswith("__like"):
- query_wheres.append(key.replace("__like", "") + " LIKE ?")
- elif key.endswith(">"):
- query_wheres.append(key.replace(">", "") + " > ?")
- elif key.endswith("<"):
- query_wheres.append(key.replace("<", "") + " < ?")
- else:
- query_wheres.append(key + " = ?")
- values.append(value)
- wheres = " AND ".join(query_wheres)
- if wheres == "":
- wheres = "1"
- query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ?
- params = values
- else:
- # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
- keys = ", ".join(params.keys())
- values = ", ".join(['?' for key in params.keys()])
- keysvalues = "(%s) VALUES (%s)" % (keys, values)
- query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ?
- params = tuple(params.values())
- elif isinstance(params, dict) and ":" in query:
- new_params = dict()
- values = []
- for key, value in params.items():
- if type(value) is list:
- for idx, val in enumerate(value):
- new_params[key + "__" + str(idx)] = val
-
- new_names = [":" + key + "__" + str(idx) for idx in range(len(value))]
- query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query)
- else:
- new_params[key] = value
-
- params = new_params
- return query, params
-
- def execute(self, query, params=None):
- query = query.strip()
- while self.db.progress_sleeping or self.db.commiting:
- time.sleep(0.1)
-
- self.db.last_query_time = time.time()
-
- query, params = self.parseQuery(query, params)
-
- cursor = self.db.getConn().cursor()
- self.db.cursors.add(cursor)
- if self.db.lock.locked():
- self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock))
-
- try:
- s = time.time()
- self.db.lock.acquire(True)
- if query.upper().strip("; ") == "VACUUM":
- self.db.commit("vacuum called")
- if params:
- res = cursor.execute(query, params)
- else:
- res = cursor.execute(query)
- finally:
- self.db.lock.release()
-
- taken_query = time.time() - s
- if self.logging or taken_query > 1:
- if params: # Query has parameters
- self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s))
- else:
- self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s))
-
- # Log query stats
- if self.db.collect_stats:
- if query not in self.db.query_stats:
- self.db.query_stats[query] = {"call": 0, "time": 0.0}
- self.db.query_stats[query]["call"] += 1
- self.db.query_stats[query]["time"] += time.time() - s
-
- query_type = query.split(" ", 1)[0].upper()
- is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"]
- if not self.db.need_commit and is_update_query:
- self.db.need_commit = True
-
- if is_update_query:
- return cursor
- else:
- return res
-
- def executemany(self, query, params):
- while self.db.progress_sleeping or self.db.commiting:
- time.sleep(0.1)
-
- self.db.last_query_time = time.time()
-
- s = time.time()
- cursor = self.db.getConn().cursor()
- self.db.cursors.add(cursor)
-
- try:
- self.db.lock.acquire(True)
- cursor.executemany(query, params)
- finally:
- self.db.lock.release()
-
- taken_query = time.time() - s
- if self.logging or taken_query > 0.1:
- self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query))
-
- self.db.need_commit = True
-
- return cursor
-
- # Creates or updates a database row without incrementing the rowid
- def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}):
- sql_sets = ["%s = :%s" % (key, key) for key in query_sets.keys()]
- sql_wheres = ["%s = :%s" % (key, key) for key in query_wheres.keys()]
-
- params = query_sets
- params.update(query_wheres)
- res = self.execute(
- "UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)),
- params
- )
- if res.rowcount == 0:
- params.update(oninsert) # Add insert-only fields
- self.execute("INSERT INTO %s ?" % table, params)
-
- # Create new table
- # Return: True on success
- def createTable(self, table, cols):
- # TODO: Check current structure
- self.execute("DROP TABLE IF EXISTS %s" % table)
- col_definitions = []
- for col_name, col_type in cols:
- col_definitions.append("%s %s" % (col_name, col_type))
-
- self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
- return True
-
- # Create indexes on table
- # Return: True on success
- def createIndexes(self, table, indexes):
- for index in indexes:
- if not index.strip().upper().startswith("CREATE"):
- self.db.log.error("Index command should start with CREATE: %s" % index)
- continue
- self.execute(index)
-
- # Create table if not exist
- # Return: True if updated
- def needTable(self, table, cols, indexes=None, version=1):
- current_version = self.db.getTableVersion(table)
- if int(current_version) < int(version): # Table needs update or does not exist
- self.db.log.debug("Table %s outdated... version: %s, need: %s, rebuilding..." % (table, current_version, version))
- self.createTable(table, cols)
- if indexes:
- self.createIndexes(table, indexes)
- self.execute(
- "INSERT OR REPLACE INTO keyvalue ?",
- {"json_id": 0, "key": "table.%s.version" % table, "value": version}
- )
- return True
- else: # Not changed
- return False
-
- # Get or create a row for a json file
- # Return: The database row
- def getJsonRow(self, file_path):
- directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
- if self.db.schema["version"] == 1:
- # One path field
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
- row = res.fetchone()
- if not row: # No row yet, create it
- self.execute("INSERT INTO json ?", {"path": file_path})
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
- row = res.fetchone()
- elif self.db.schema["version"] == 2:
- # Separate directory, file_name (easier join)
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
- row = res.fetchone()
- if not row: # No row yet, create it
- self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
- row = res.fetchone()
- elif self.db.schema["version"] == 3:
- # Separate site, directory, file_name (for merger sites)
- site_address, directory = re.match("^([^/]*)/(.*)$", directory).groups()
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
- row = res.fetchone()
- if not row: # No row yet, create it
- self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name})
- res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
- row = res.fetchone()
- else:
- raise Exception("Dbschema version %s not supported" % self.db.schema.get("version"))
- return row
-
- def close(self):
- pass
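An illustration of the dict-parameter expansion parseQuery() performs (the rewrites are shown as comments, since running them needs a live Db connection):

# execute("SELECT * FROM json WHERE ?", {"directory": "users", "not__json_id": 1})
#   -> "SELECT * FROM json WHERE directory = ? AND json_id != ?" with params ["users", 1]
# execute("INSERT INTO json ?", {"path": "data.json"})
#   -> "INSERT INTO json (path) VALUES (?)" with params ("data.json",)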
diff --git a/src/Db/DbQuery.py b/src/Db/DbQuery.py
deleted file mode 100644
index 3fb5ef73..00000000
--- a/src/Db/DbQuery.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import re
-
-
-# Parse and modify sql queries
-class DbQuery:
- def __init__(self, query):
- self.setQuery(query.strip())
-
- # Split main parts of query
- def parseParts(self, query):
- parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
- parts = [_f for _f in parts if _f] # Remove empty parts
- parts = [s.strip() for s in parts] # Remove whitespace
- return dict(list(zip(parts[0::2], parts[1::2])))
-
- # Parse selected fields SELECT ... FROM
- def parseFields(self, query_select):
- fields = re.findall("([^,]+) AS ([^,]+)", query_select)
- return {key: val.strip() for val, key in fields}
-
- # Parse query conditions WHERE ...
- def parseWheres(self, query_where):
- if " AND " in query_where:
- return query_where.split(" AND ")
- elif query_where:
- return [query_where]
- else:
- return []
-
- # Set the query
- def setQuery(self, query):
- self.parts = self.parseParts(query)
- self.fields = self.parseFields(self.parts["SELECT"])
- self.wheres = self.parseWheres(self.parts.get("WHERE", ""))
-
- # Convert query back to string
- def __str__(self):
- query_parts = []
- for part_name in ["SELECT", "FROM", "WHERE", "ORDER BY", "LIMIT"]:
- if part_name == "WHERE" and self.wheres:
- query_parts.append("WHERE")
- query_parts.append(" AND ".join(self.wheres))
- elif part_name in self.parts:
- query_parts.append(part_name)
- query_parts.append(self.parts[part_name])
- return "\n".join(query_parts)
diff --git a/src/Db/__init__.py b/src/Db/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py
deleted file mode 100644
index 0ec42615..00000000
--- a/src/Debug/Debug.py
+++ /dev/null
@@ -1,186 +0,0 @@
-import sys
-import os
-import re
-from Config import config
-
-
- # Non-fatal exception
-class Notify(Exception):
- def __init__(self, message=None):
- if message:
- self.message = message
-
- def __str__(self):
- return self.message
-
-
- # Gevent greenlet.kill accepts an Exception type
-def createNotifyType(message):
- return type("Notify", (Notify, ), {"message": message})
-
-
-def formatExceptionMessage(err):
- err_type = err.__class__.__name__
- if err.args:
- err_message = err.args[-1]
- else:
- err_message = err.__str__()
- return "%s: %s" % (err_type, err_message)
-
-
-python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")]
-python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy
-
-root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../")
-root_dir = root_dir.replace("\\", "/")
-
-
-def formatTraceback(items, limit=None, fold_builtin=True):
- back = []
- i = 0
- prev_file_title = ""
- is_prev_builtin = False
-
- for path, line in items:
- i += 1
- is_last = i == len(items)
- path = path.replace("\\", "/")
-
- if path.startswith("src/gevent/"):
- file_title = "/" + path[len("src/gevent/"):]
- is_builtin = True
- is_skippable_builtin = False
- elif path in ("", ""):
- file_title = "(importlib)"
- is_builtin = True
- is_skippable_builtin = True
- else:
- is_skippable_builtin = False
- for base in python_lib_dirs:
- if path.startswith(base + "/"):
- file_title = path[len(base + "/"):]
- module_name, *tail = file_title.split("/")
- if module_name.endswith(".py"):
- module_name = module_name[:-3]
- file_title = "/".join(["<%s>" % module_name] + tail)
- is_builtin = True
- break
- else:
- is_builtin = False
- for base in (root_dir + "/src", root_dir + "/plugins", root_dir):
- if path.startswith(base + "/"):
- file_title = path[len(base + "/"):]
- break
- else:
- # For unknown paths, do our best to hide the absolute path
- file_title = path
- for needle in ("/zeronet/", "/core/"):
- if needle in file_title.lower():
- file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):]
-
- # Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py
- # E.g.: in 'Db/DbCursor.py' the directory part is unnecessary
- if not file_title.startswith("/"):
- prev_part = ""
- for i, part in enumerate(file_title.split("/") + [""]):
- if not part.startswith(prev_part):
- break
- prev_part = part
- file_title = "/".join(file_title.split("/")[i - 1:])
-
- if is_skippable_builtin and fold_builtin:
- pass
- elif is_builtin and is_prev_builtin and not is_last and fold_builtin:
- if back[-1] != "...":
- back.append("...")
- else:
- if file_title == prev_file_title:
- back.append("%s" % line)
- else:
- back.append("%s line %s" % (file_title, line))
-
- prev_file_title = file_title
- is_prev_builtin = is_builtin
-
- if limit and i >= limit:
- back.append("...")
- break
- return back
-
-
-def formatException(err=None, format="text"):
- import traceback
- if type(err) == Notify:
- return err
- elif type(err) == tuple and err and err[0] is not None: # Passed traceback info
- exc_type, exc_obj, exc_tb = err
- err = None
- else: # No traceback info passed, get the latest
- exc_type, exc_obj, exc_tb = sys.exc_info()
-
- if not err:
- if hasattr(exc_obj, "message"):
- err = exc_obj.message
- else:
- err = exc_obj
-
- tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)])
- if format == "html":
- return "%s: %s%s " % (repr(err), err, " > ".join(tb))
- else:
- return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
-
-
-def formatStack(limit=None):
- import inspect
- tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit)
- return " > ".join(tb)
-
-
-# Test if gevent eventloop blocks
-import logging
-import gevent
-import time
-
-
-num_block = 0
-
-
-def testBlock():
- global num_block
- logging.debug("Gevent block checker started")
- last_time = time.time()
- while 1:
- time.sleep(1)
- if time.time() - last_time > 1.1:
- logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1))
- num_block += 1
- last_time = time.time()
-
-
-gevent.spawn(testBlock)
-
-
-if __name__ == "__main__":
- try:
- print(1 / 0)
- except Exception as err:
- print(type(err).__name__)
- print("1/0 error: %s" % formatException(err))
-
- def loadJson():
- json.loads("Errr")
-
- import json
- try:
- loadJson()
- except Exception as err:
- print(err)
- print("Json load error: %s" % formatException(err))
-
- try:
- raise Notify("nothing...")
- except Exception as err:
- print("Notify: %s" % formatException(err))
-
- loadJson()
diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py
deleted file mode 100644
index d100a3b8..00000000
--- a/src/Debug/DebugHook.py
+++ /dev/null
@@ -1,115 +0,0 @@
-import sys
-import logging
-import signal
-import importlib
-
-import gevent
-import gevent.hub
-
-from Config import config
-from . import Debug
-
-last_error = None
-
-def shutdown(reason="Unknown"):
- logging.info("Shutting down (reason: %s)..." % reason)
- import main
- if "file_server" in dir(main):
- try:
- gevent.spawn(main.file_server.stop)
- if "ui_server" in dir(main):
- gevent.spawn(main.ui_server.stop)
- except Exception as err:
- print("Proper shutdown error: %s" % err)
- sys.exit(0)
- else:
- sys.exit(0)
-
-# Store last error, ignore notify, allow manual error logging
-def handleError(*args, **kwargs):
- global last_error
- if not args: # Manual called
- args = sys.exc_info()
- silent = True
- else:
- silent = False
- if args[0].__name__ != "Notify":
- last_error = args
-
- if args[0].__name__ == "KeyboardInterrupt":
- shutdown("Keyboard interrupt")
- elif not silent and args[0].__name__ != "Notify":
- logging.exception("Unhandled exception")
- if "greenlet.py" not in args[2].tb_frame.f_code.co_filename: # Don't display error twice
- sys.__excepthook__(*args, **kwargs)
-
-
-# Ignore notify errors
-def handleErrorNotify(*args, **kwargs):
- err = args[0]
- if err.__name__ == "KeyboardInterrupt":
- shutdown("Keyboard interrupt")
- elif err.__name__ != "Notify":
- logging.error("Unhandled exception: %s" % Debug.formatException(args))
- sys.__excepthook__(*args, **kwargs)
-
-
-if config.debug: # Keep last error for /Debug
- sys.excepthook = handleError
-else:
- sys.excepthook = handleErrorNotify
-
-
-# Override default error handler to allow silent killing / custom logging
-if "handle_error" in dir(gevent.hub.Hub):
- gevent.hub.Hub._original_handle_error = gevent.hub.Hub.handle_error
-else:
- logging.debug("gevent.hub.Hub.handle_error not found using old gevent hooks")
- OriginalGreenlet = gevent.Greenlet
- class ErrorhookedGreenlet(OriginalGreenlet):
- def _report_error(self, exc_info):
- sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
-
- gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
- importlib.reload(gevent)
-
-def handleGreenletError(context, type, value, tb):
- if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool":
- # Exceptions in ThreadPool will be handled in the main Thread
- return None
-
- if isinstance(value, str):
- # Cython can raise errors where the value is a plain string
- # e.g. AttributeError, "_semaphore.Semaphore has no attr"
- value = type(value)
-
- if not issubclass(type, gevent.get_hub().NOT_ERROR):
- sys.excepthook(type, value, tb)
-
-gevent.get_hub().handle_error = handleGreenletError
-
-try:
- signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM"))
-except Exception as err:
- logging.debug("Error setting up SIGTERM watcher: %s" % err)
-
-
-if __name__ == "__main__":
- import time
- from gevent import monkey
- monkey.patch_all(thread=False, ssl=False)
- from . import Debug
-
- def sleeper(num):
- print("started", num)
- time.sleep(3)
- raise Exception("Error")
- print("stopped", num)
- thread1 = gevent.spawn(sleeper, 1)
- thread2 = gevent.spawn(sleeper, 2)
- time.sleep(1)
- print("killing...")
- thread1.kill(exception=Debug.Notify("Worker stopped"))
- #thread2.throw(Debug.Notify("Throw"))
- print("killed")
- gevent.joinall([thread1,thread2])
diff --git a/src/Debug/DebugLock.py b/src/Debug/DebugLock.py
deleted file mode 100644
index 9cf22520..00000000
--- a/src/Debug/DebugLock.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import time
-import logging
-
-import gevent.lock
-
-from Debug import Debug
-
-
-class DebugLock:
- def __init__(self, log_after=0.01, name="Lock"):
- self.name = name
- self.log_after = log_after
- self.lock = gevent.lock.Semaphore(1)
- self.release = self.lock.release
-
- def acquire(self, *args, **kwargs):
- s = time.time()
- res = self.lock.acquire(*args, **kwargs)
- time_taken = time.time() - s
- if time_taken >= self.log_after:
- logging.debug("%s: Waited %.3fs after called by %s" %
- (self.name, time_taken, Debug.formatStack())
- )
- return res
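Usage sketch (in this module's context): DebugLock is a drop-in replacement for gevent.lock.Semaphore(1) that logs a debug line whenever acquiring took longer than log_after seconds.

lock = DebugLock(log_after=0.05, name="DemoLock")
lock.acquire()
try:
    pass  # critical section
finally:
    lock.release()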
diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py
deleted file mode 100644
index a892dc56..00000000
--- a/src/Debug/DebugMedia.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import os
-import subprocess
-import re
-import logging
-import time
-import functools
-
-from Config import config
-from util import helper
-
-
-# Find files with extension in path
-def findfiles(path, find_ext):
- def sorter(f1, f2):
- f1 = f1[0].replace(path, "")
- f2 = f2[0].replace(path, "")
- if f1 == "":
- return 1
- elif f2 == "":
- return -1
- else:
- return helper.cmp(f1.lower(), f2.lower())
-
- for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)):
- for file in sorted(files):
- file_path = root + "/" + file
- file_ext = file.split(".")[-1]
- if file_ext in find_ext and not file.startswith("all."):
- yield file_path.replace("\\", "/")
-
-
-# Try to find coffeescript compiler in path
-def findCoffeescriptCompiler():
- coffeescript_compiler = None
- try:
- import distutils.spawn
- coffeescript_compiler = helper.shellquote(distutils.spawn.find_executable("coffee")) + " --no-header -p"
- except:
- pass
- if coffeescript_compiler:
- return coffeescript_compiler
- else:
- return False
-
-
- # Generates all.js (merge *.js, compile coffeescript) and all.css (merge *.css, add vendor prefixes)
-def merge(merged_path):
- merged_path = merged_path.replace("\\", "/")
- merge_dir = os.path.dirname(merged_path)
- s = time.time()
- ext = merged_path.split(".")[-1]
- if ext == "js": # If merging .js find .coffee too
- find_ext = ["js", "coffee"]
- else:
- find_ext = [ext]
-
- # If it exists, check the other files' modification dates
- if os.path.isfile(merged_path):
- merged_mtime = os.path.getmtime(merged_path)
- else:
- merged_mtime = 0
-
- changed = {}
- for file_path in findfiles(merge_dir, find_ext):
- if os.path.getmtime(file_path) > merged_mtime + 1:
- changed[file_path] = True
- if not changed:
- return # Assets not changed, nothing to do
-
- old_parts = {}
- if os.path.isfile(merged_path): # Find old parts to avoid unnecessary recompile
- merged_old = open(merged_path, "rb").read()
- for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
- old_parts[match[1].decode()] = match[2].strip(b"\n\r")
-
- logging.debug("Merging %s (changed: %s, old parts: %s)" % (merged_path, changed, len(old_parts)))
- # Merge files
- parts = []
- s_total = time.time()
- for file_path in findfiles(merge_dir, find_ext):
- file_relative_path = file_path.replace(merge_dir + "/", "")
- parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8"))
- if file_path.endswith(".coffee"): # Compile coffee script
- if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or not compiled before
- if config.coffeescript_compiler is None:
- config.coffeescript_compiler = findCoffeescriptCompiler()
- if not config.coffeescript_compiler:
- logging.error("No coffeescript compiler defined, skipping compiling %s" % merged_path)
- return False # No coffeescript compiler, skip this file
-
- # Replace / with os separators and escape it
- file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep))
-
- if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file
- command = config.coffeescript_compiler.replace("%s", file_path_escaped)
- else: # Put coffeescript file to end
- command = config.coffeescript_compiler + " " + file_path_escaped
-
- # Start compiling
- s = time.time()
- compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
- out = compiler.stdout.read()
- compiler.wait()
- logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
-
- # Check errors
- if out and out.startswith(b"("): # No error found
- parts.append(out)
- else: # Put error message in place of source code
- error = out
- logging.error("%s Compile error: %s" % (file_relative_path, error))
- error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n")
- parts.append(
- b"alert('%s compile error: %s');" %
- (file_relative_path.encode(), error_escaped)
- )
- else: # Not changed, use the old part
- parts.append(old_parts[file_relative_path])
- else: # Add to parts
- parts.append(open(file_path, "rb").read())
-
- merged = b"\n".join(parts)
- if ext == "css": # Vendor prefix css
- from lib.cssvendor import cssvendor
- merged = cssvendor.prefix(merged)
- merged = merged.replace(b"\r", b"")
- open(merged_path, "wb").write(merged)
- logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
-
-
-if __name__ == "__main__":
- logging.getLogger().setLevel(logging.DEBUG)
- os.chdir("..")
- config.coffeescript_compiler = r'type "%s" | tools\coffee-node\bin\node.exe tools\coffee-node\bin\coffee --no-header -s -p'
- merge("data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/js/all.js")
diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py
deleted file mode 100644
index 482c7921..00000000
--- a/src/Debug/DebugReloader.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import logging
-import time
-import os
-
-from Config import config
-
-if config.debug and config.action == "main":
- try:
- import watchdog
- import watchdog.observers
- import watchdog.events
- logging.debug("Watchdog fs listener detected, source code autoreload enabled")
- enabled = True
- except Exception as err:
- logging.debug("Watchdog fs listener could not be loaded: %s" % err)
- enabled = False
-else:
- enabled = False
-
-
-class DebugReloader:
- def __init__(self, paths=None):
- if not paths:
- paths = ["src", "plugins", config.data_dir + "/__plugins__"]
- self.log = logging.getLogger("DebugReloader")
- self.last_changed = 0
- self.callbacks = []
- if enabled:
- self.observer = watchdog.observers.Observer()
- event_handler = watchdog.events.FileSystemEventHandler()
- event_handler.on_modified = event_handler.on_deleted = self.onChanged
- event_handler.on_created = event_handler.on_moved = self.onChanged
- for path in paths:
- if not os.path.isdir(path):
- continue
- self.log.debug("Adding autoreload: %s" % path)
- self.observer.schedule(event_handler, path, recursive=True)
- self.observer.start()
-
- def addCallback(self, f):
- self.callbacks.append(f)
-
- def onChanged(self, evt):
- path = evt.src_path
- ext = path.rsplit(".", 1)[-1]
- if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_chaged < 1.0:
- return False
- self.last_changed = time.time()
- if os.path.isfile(path):
- time_modified = os.path.getmtime(path)
- else:
- time_modified = 0
- self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified))
- if time.time() - time_modified > 5: # Probably it's just an attribute change, ignore it
- return False
-
- time.sleep(0.1) # Wait for lock release
- for callback in self.callbacks:
- try:
- callback()
- except Exception as err:
- self.log.exception(err)
-
- def stop(self):
- if enabled:
- self.observer.stop()
- self.log.debug("Stopped autoreload observer")
-
-watcher = DebugReloader()
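
Stripped of the ZeroNet specifics, DebugReloader is a debounced watchdog handler: every file event funnels into one callback, and a one-second window swallows the burst of events most editors emit per save. A minimal sketch of the same pattern, assuming the watchdog package is installed:

import time
import watchdog.events
import watchdog.observers

class Autoreloader:
    def __init__(self, paths, callback):
        self.callback = callback
        self.last_changed = 0
        handler = watchdog.events.FileSystemEventHandler()
        # Funnel every event type into the same debounced hook
        handler.on_modified = handler.on_created = handler.on_deleted = handler.on_moved = self.onChanged
        self.observer = watchdog.observers.Observer()
        for path in paths:
            self.observer.schedule(handler, path, recursive=True)
        self.observer.start()

    def onChanged(self, evt):
        if time.time() - self.last_changed < 1.0:
            return  # Debounce: editors fire several events per save
        self.last_changed = time.time()
        self.callback()
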
diff --git a/src/Debug/__init__.py b/src/Debug/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py
deleted file mode 100644
index 65c335a9..00000000
--- a/src/File/FileRequest.py
+++ /dev/null
@@ -1,448 +0,0 @@
-# Included modules
-import os
-import time
-import json
-import collections
-import itertools
-
-# Third party modules
-import gevent
-
-from Debug import Debug
-from Config import config
-from util import RateLimit
-from util import Msgpack
-from util import helper
-from Plugin import PluginManager
-from contextlib import closing
-
-FILE_BUFF = 1024 * 512
-
-
-class RequestError(Exception):
- pass
-
-
-# Incoming requests
-@PluginManager.acceptPlugins
-class FileRequest(object):
- __slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
-
- def __init__(self, server, connection):
- self.server = server
- self.connection = connection
-
- self.req_id = None
- self.sites = self.server.sites
- self.log = server.log
- self.responded = False # Responded to the request
-
- def send(self, msg, streaming=False):
- if not self.connection.closed:
- self.connection.send(msg, streaming)
-
- def sendRawfile(self, file, read_bytes):
- if not self.connection.closed:
- self.connection.sendRawfile(file, read_bytes)
-
- def response(self, msg, streaming=False):
- if self.responded:
- if config.verbose:
- self.log.debug("Req id %s already responded" % self.req_id)
- return
- if not isinstance(msg, dict): # If msg not a dict create a {"body": msg}
- msg = {"body": msg}
- msg["cmd"] = "response"
- msg["to"] = self.req_id
- self.responded = True
- self.send(msg, streaming=streaming)
-
- # Route file requests
- def route(self, cmd, req_id, params):
- self.req_id = req_id
- # Don't allow sites other than the locked ones
- if "site" in params and self.connection.target_onion:
- valid_sites = self.connection.getValidSites()
- if params["site"] not in valid_sites and valid_sites != ["global"]:
- self.response({"error": "Invalid site"})
- self.connection.log(
- "Site lock violation: %s not in %s, target onion: %s" %
- (params["site"], valid_sites, self.connection.target_onion)
- )
- self.connection.badAction(5)
- return False
-
- if cmd == "update":
- event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
- # If called more than once within 15 sec only keep the last update
- RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params)
- else:
- func_name = "action" + cmd[0].upper() + cmd[1:]
- func = getattr(self, func_name, None)
- if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions
- if self.connection.cpu_time > 0.5:
- self.log.debug(
- "Delay %s %s, cpu_time used by connection: %.3fs" %
- (self.connection.ip, cmd, self.connection.cpu_time)
- )
- time.sleep(self.connection.cpu_time)
- if self.connection.cpu_time > 5:
- self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time)
- s = time.time()
- if func:
- func(params)
- else:
- self.actionUnknown(cmd, params)
-
- if cmd not in ["getFile", "streamFile"]:
- taken = time.time() - s
- taken_sent = self.connection.last_sent_time - self.connection.last_send_time
- self.connection.cpu_time += taken - taken_sent
-
- # Update a site file request
- def actionUpdate(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(1)
- self.connection.badAction(5)
- return False
-
- inner_path = params.get("inner_path", "")
- current_content_modified = site.content_manager.contents.get(inner_path, {}).get("modified", 0)
- body = params["body"]
-
- if not inner_path.endswith("content.json"):
- self.response({"error": "Only content.json update allowed"})
- self.connection.badAction(5)
- return
-
- should_validate_content = True
- if "modified" in params and params["modified"] <= current_content_modified:
- should_validate_content = False
- valid = None # Same or earlier content as we have
- elif not body: # No body sent, we have to download it first
- site.log.debug("Missing body from update for file %s, downloading ..." % inner_path)
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer
- try:
- body = peer.getFile(site.address, inner_path).read()
- except Exception as err:
- site.log.debug("Can't download updated file %s: %s" % (inner_path, err))
- self.response({"error": "File invalid update: Can't download updaed file"})
- self.connection.badAction(5)
- return
-
- if should_validate_content:
- try:
- content = json.loads(body.decode())
- except Exception as err:
- site.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err))
- self.response({"error": "File invalid JSON"})
- self.connection.badAction(5)
- return
-
- file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"])
-
- if self.server.files_parsing.get(file_uri): # Check if we are already working on it
- valid = None # Same file
- else:
- try:
- valid = site.content_manager.verifyFile(inner_path, content)
- except Exception as err:
- site.log.debug("Update for %s is invalid: %s" % (inner_path, err))
- error = err
- valid = False
-
- if valid is True: # Valid and changed
- site.log.info("Update for %s looks valid, saving..." % inner_path)
- self.server.files_parsing[file_uri] = True
- site.storage.write(inner_path, body)
- del params["body"]
-
- site.onFileDone(inner_path) # Trigger filedone
-
- if inner_path.endswith("content.json"): # Download every changed file from peer
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer
- # On complete publish to other peers
- diffs = params.get("diffs", {})
- site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=3), "publish_%s" % inner_path)
-
- # Load new content file and download changed files in new thread
- def downloader():
- site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {}))
- del self.server.files_parsing[file_uri]
-
- gevent.spawn(downloader)
- else:
- del self.server.files_parsing[file_uri]
-
- self.response({"ok": "Thanks, file %s updated!" % inner_path})
- self.connection.goodAction()
-
- elif valid is None: # Not changed
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update old") # Add or get peer
- if peer:
- if not peer.connection:
- peer.connect(self.connection) # Assign current connection to peer
- if inner_path in site.content_manager.contents:
- peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"]
- if config.verbose:
- site.log.debug(
- "Same version, adding new peer for locked files: %s, tasks: %s" %
- (peer.key, len(site.worker_manager.tasks))
- )
- for task in site.worker_manager.tasks: # Add the new peer to every ongoing task
- if task["peers"] and not task["optional_hash_id"]:
- # Download the file from this peer too if it's peer-locked
- site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
-
- self.response({"ok": "File not changed"})
- self.connection.badAction()
-
- else: # Invalid signature or SHA hash
- self.response({"error": "File %s invalid: %s" % (inner_path, error)})
- self.connection.badAction(5)
-
- def isReadable(self, site, inner_path, file, pos):
- return True
-
- # Send file content request
- def handleGetFile(self, params, streaming=False):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
- try:
- file_path = site.storage.getPath(params["inner_path"])
- if streaming:
- file_obj = site.storage.open(params["inner_path"])
- else:
- file_obj = Msgpack.FilePart(file_path, "rb")
-
- with file_obj as file:
- file.seek(params["location"])
- read_bytes = params.get("read_bytes", FILE_BUFF)
- file_size = os.fstat(file.fileno()).st_size
-
- if file_size > read_bytes: # Check if file is readable at current position (for big files)
- if not self.isReadable(site, params["inner_path"], file, params["location"]):
- raise RequestError("File not readable at position: %s" % params["location"])
- else:
- if params.get("file_size") and params["file_size"] != file_size:
- self.connection.badAction(2)
- raise RequestError("File size does not match: %sB != %sB" % (params["file_size"], file_size))
-
- if not streaming:
- file.read_bytes = read_bytes
-
- if params["location"] > file_size:
- self.connection.badAction(5)
- raise RequestError("Bad file location")
-
- if streaming:
- back = {
- "size": file_size,
- "location": min(file.tell() + read_bytes, file_size),
- "stream_bytes": min(read_bytes, file_size - params["location"])
- }
- self.response(back)
- self.sendRawfile(file, read_bytes=read_bytes)
- else:
- back = {
- "body": file,
- "size": file_size,
- "location": min(file.tell() + file.read_bytes, file_size)
- }
- self.response(back, streaming=True)
-
- bytes_sent = min(read_bytes, file_size - params["location"]) # Number of bytes we are going to send
- site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + bytes_sent
- if config.debug_socket:
- self.log.debug("File %s at position %s sent %s bytes" % (file_path, params["location"], bytes_sent))
-
- # Add peer to site if not added before
- connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request")
- if connected_peer: # Just added
- connected_peer.connect(self.connection) # Assign current connection to peer
-
- return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]}
-
- except RequestError as err:
- self.log.debug("GetFile %s %s %s request error: %s" % (self.connection, params["site"], params["inner_path"], Debug.formatException(err)))
- self.response({"error": "File read error: %s" % err})
- except OSError as err:
- if config.verbose:
- self.log.debug("GetFile read error: %s" % Debug.formatException(err))
- self.response({"error": "File read error"})
- return False
- except Exception as err:
- self.log.error("GetFile exception: %s" % Debug.formatException(err))
- self.response({"error": "File read exception"})
- return False
-
- def actionGetFile(self, params):
- return self.handleGetFile(params)
-
- def actionStreamFile(self, params):
- return self.handleGetFile(params, streaming=True)
-
- # Peer exchange request
- def actionPex(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
-
- got_peer_keys = []
- added = 0
-
- # Add requester peer to site
- connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request")
-
- if connected_peer: # It was not registered before
- added += 1
- connected_peer.connect(self.connection) # Assign current connection to peer
-
- # Add sent peers to site
- for packed_address in itertools.chain(params.get("peers", []), params.get("peers_ipv6", [])):
- address = helper.unpackAddress(packed_address)
- got_peer_keys.append("%s:%s" % address)
- if site.addPeer(*address, source="pex"):
- added += 1
-
- # Add sent onion peers to site
- for packed_address in params.get("peers_onion", []):
- address = helper.unpackOnionAddress(packed_address)
- got_peer_keys.append("%s:%s" % address)
- if site.addPeer(*address, source="pex"):
- added += 1
-
- # Send back peers that are not in the sent list and are connectable (not port 0)
- packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], ignore=got_peer_keys, allow_private=False))
-
- if added:
- site.worker_manager.onPeers()
- if config.verbose:
- self.log.debug(
- "Added %s peers to %s using pex, sending back %s" %
- (added, site, {key: len(val) for key, val in packed_peers.items()})
- )
-
- back = {
- "peers": packed_peers["ipv4"],
- "peers_ipv6": packed_peers["ipv6"],
- "peers_onion": packed_peers["onion"]
- }
-
- self.response(back)
-
- # Get content.json files modified since the given time
- def actionListModified(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
- modified_files = site.content_manager.listModified(params["since"])
-
- # Add peer to site if not added before
- connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request")
- if connected_peer: # Just added
- connected_peer.connect(self.connection) # Assign current connection to peer
-
- self.response({"modified_files": modified_files})
-
- def actionGetHashfield(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
-
- # Add peer to site if not added before
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="request")
- if not peer.connection: # Just added
- peer.connect(self.connection) # Assign current connection to peer
-
- peer.time_my_hashfield_sent = time.time() # Don't send again if not changed
-
- self.response({"hashfield_raw": site.content_manager.hashfield.tobytes()})
-
- def findHashIds(self, site, hash_ids, limit=100):
- back = collections.defaultdict(lambda: collections.defaultdict(list))
- found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)
-
- for hash_id, peers in found.items():
- for peer in peers:
- ip_type = helper.getIpType(peer.ip)
- if len(back[ip_type][hash_id]) < 20:
- back[ip_type][hash_id].append(peer.packMyAddress())
- return back
-
- def actionFindHashIds(self, params):
- site = self.sites.get(params["site"])
- s = time.time()
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
-
- event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
- if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
- time.sleep(0.1)
- back = self.findHashIds(site, params["hash_ids"], limit=10)
- else:
- back = self.findHashIds(site, params["hash_ids"])
- RateLimit.called(event_key)
-
- my_hashes = []
- my_hashfield_set = set(site.content_manager.hashfield)
- for hash_id in params["hash_ids"]:
- if hash_id in my_hashfield_set:
- my_hashes.append(hash_id)
-
- if config.verbose:
- self.log.debug(
- "Found: %s for %s hashids in %.3fs" %
- ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s)
- )
- self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes})
-
- def actionSetHashfield(self, params):
- site = self.sites.get(params["site"])
- if not site or not site.isServing(): # Site unknown or not serving
- self.response({"error": "Unknown site"})
- self.connection.badAction(5)
- return False
-
- # Add or get peer
- peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection, source="request")
- if not peer.connection:
- peer.connect(self.connection)
- peer.hashfield.replaceFromBytes(params["hashfield_raw"])
- self.response({"ok": "Updated"})
-
- # Send a simple Pong! answer
- def actionPing(self, params):
- self.response(b"Pong!")
-
- # Check requested port of the other peer
- def actionCheckport(self, params):
- if helper.getIpType(self.connection.ip) == "ipv6":
- sock_address = (self.connection.ip, params["port"], 0, 0)
- else:
- sock_address = (self.connection.ip, params["port"])
-
- with closing(helper.createSocket(self.connection.ip)) as sock:
- sock.settimeout(5)
- if sock.connect_ex(sock_address) == 0:
- self.response({"status": "open", "ip_external": self.connection.ip})
- else:
- self.response({"status": "closed", "ip_external": self.connection.ip})
-
- # Unknown command
- def actionUnknown(self, cmd, params):
- self.response({"error": "Unknown command: %s" % cmd})
- self.connection.badAction(5)
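
FileRequest's routing relies on a simple naming convention: the wire-level `cmd` string is capitalized and prefixed with `action`, then looked up with getattr. A minimal sketch of that dispatch idiom (the `ping` handler mirrors actionPing above; the rest is illustrative):

class Request:
    def route(self, cmd, params):
        # "getFile" -> actionGetFile, "ping" -> actionPing
        func = getattr(self, "action" + cmd[0].upper() + cmd[1:], None)
        if func:
            return func(params)
        return self.actionUnknown(cmd, params)

    def actionPing(self, params):
        return {"body": b"Pong!"}

    def actionUnknown(self, cmd, params):
        return {"error": "Unknown command: %s" % cmd}

# Usage
req = Request()
assert req.route("ping", {}) == {"body": b"Pong!"}
assert "error" in req.route("bogus", {})
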
diff --git a/src/File/FileServer.py b/src/File/FileServer.py
deleted file mode 100644
index 7f73017e..00000000
--- a/src/File/FileServer.py
+++ /dev/null
@@ -1,403 +0,0 @@
-import logging
-import time
-import random
-import socket
-import sys
-
-import gevent
-import gevent.pool
-from gevent.server import StreamServer
-
-import util
-from util import helper
-from Config import config
-from .FileRequest import FileRequest
-from Peer import PeerPortchecker
-from Site import SiteManager
-from Connection import ConnectionServer
-from Plugin import PluginManager
-from Debug import Debug
-
-
-@PluginManager.acceptPlugins
-class FileServer(ConnectionServer):
-
- def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port, ip_type=config.fileserver_ip_type):
- self.site_manager = SiteManager.site_manager
- self.portchecker = PeerPortchecker.PeerPortchecker(self)
- self.log = logging.getLogger("FileServer")
- self.ip_type = ip_type
- self.ip_external_list = []
-
- self.supported_ip_types = ["ipv4"] # Outgoing ip_type support
- if helper.getIpType(ip) == "ipv6" or self.isIpv6Supported():
- self.supported_ip_types.append("ipv6")
-
- if ip_type == "ipv6" or (ip_type == "dual" and "ipv6" in self.supported_ip_types):
- ip = ip.replace("*", "::")
- else:
- ip = ip.replace("*", "0.0.0.0")
-
- if config.tor == "always":
- port = config.tor_hs_port
- config.fileserver_port = port
- elif port == 0: # Use random port
- port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-")))
- port = self.getRandomPort(ip, port_range_from, port_range_to)
- config.fileserver_port = port
- if not port:
- raise Exception("Can't find bindable port")
- if not config.tor == "always":
- config.saveValue("fileserver_port", port) # Save random port value for next restart
- config.arguments.fileserver_port = port
-
- ConnectionServer.__init__(self, ip, port, self.handleRequest)
- self.log.debug("Supported IP types: %s" % self.supported_ip_types)
-
- if ip_type == "dual" and ip == "::":
- # Also bind to an IPv4 address in dual mode
- try:
- self.log.debug("Binding proxy to %s:%s" % ("0.0.0.0", self.port))
- self.stream_server_proxy = StreamServer(
- ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
- )
- except Exception as err:
- self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err))
-
- self.port_opened = {}
-
- self.sites = self.site_manager.sites
- self.last_request = time.time()
- self.files_parsing = {}
- self.ui_server = None
-
- def getRandomPort(self, ip, port_range_from, port_range_to):
- self.log.info("Getting random port in range %s-%s..." % (port_range_from, port_range_to))
- tried = []
- for bind_retry in range(100):
- port = random.randint(port_range_from, port_range_to)
- if port in tried:
- continue
- tried.append(port)
- sock = helper.createSocket(ip)
- try:
- sock.bind((ip, port))
- success = True
- except Exception as err:
- self.log.warning("Error binding to port %s: %s" % (port, err))
- success = False
- sock.close()
- if success:
- self.log.info("Found unused random port: %s" % port)
- return port
- else:
- time.sleep(0.1)
- return False
-
- def isIpv6Supported(self):
- if config.tor == "always":
- return True
- # Test if we can connect to ipv6 address
- ipv6_testip = "fcec:ae97:8902:d810:6c92:ec67:efb2:3ec5"
- try:
- sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
- sock.connect((ipv6_testip, 80))
- local_ipv6 = sock.getsockname()[0]
- if local_ipv6 == "::1":
- self.log.debug("IPv6 not supported, no local IPv6 address")
- return False
- else:
- self.log.debug("IPv6 supported on IP %s" % local_ipv6)
- return True
- except socket.error as err:
- self.log.warning("IPv6 not supported: %s" % err)
- return False
- except Exception as err:
- self.log.error("IPv6 check error: %s" % err)
- return False
-
- def listenProxy(self):
- try:
- self.stream_server_proxy.serve_forever()
- except Exception as err:
- if err.errno == 98: # Address already in use error
- self.log.debug("StreamServer proxy listen error: %s" % err)
- else:
- self.log.info("StreamServer proxy listen error: %s" % err)
-
- # Handle request to fileserver
- def handleRequest(self, connection, message):
- if config.verbose:
- if "params" in message:
- self.log.debug(
- "FileRequest: %s %s %s %s" %
- (str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
- )
- else:
- self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
- req = FileRequest(self, connection)
- req.route(message["cmd"], message.get("req_id"), message.get("params"))
- if not self.has_internet and not connection.is_private_ip:
- self.has_internet = True
- self.onInternetOnline()
-
- def onInternetOnline(self):
- self.log.info("Internet online")
- gevent.spawn(self.checkSites, check_files=False, force_port_check=True)
-
- # Reload the FileRequest class to prevent restarts in debug mode
- def reload(self):
- global FileRequest
- import imp
- FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
-
- def portCheck(self):
- if config.offline:
- self.log.info("Offline mode: port check disabled")
- res = {"ipv4": None, "ipv6": None}
- self.port_opened = res
- return res
-
- if config.ip_external:
- for ip_external in config.ip_external:
- SiteManager.peer_blacklist.append((ip_external, self.port)) # Add myself to peer blacklist
-
- ip_external_types = set([helper.getIpType(ip) for ip in config.ip_external])
- res = {
- "ipv4": "ipv4" in ip_external_types,
- "ipv6": "ipv6" in ip_external_types
- }
- self.ip_external_list = config.ip_external
- self.port_opened.update(res)
- self.log.info("Server port opened based on configuration ipv4: %s, ipv6: %s" % (res["ipv4"], res["ipv6"]))
- return res
-
- self.port_opened = {}
- if self.ui_server:
- self.ui_server.updateWebsocket()
-
- if "ipv6" in self.supported_ip_types:
- res_ipv6_thread = gevent.spawn(self.portchecker.portCheck, self.port, "ipv6")
- else:
- res_ipv6_thread = None
-
- res_ipv4 = self.portchecker.portCheck(self.port, "ipv4")
- if not res_ipv4["opened"] and config.tor != "always":
- if self.portchecker.portOpen(self.port):
- res_ipv4 = self.portchecker.portCheck(self.port, "ipv4")
-
- if res_ipv6_thread is None:
- res_ipv6 = {"ip": None, "opened": None}
- else:
- res_ipv6 = res_ipv6_thread.get()
- if res_ipv6["opened"] and not helper.getIpType(res_ipv6["ip"]) == "ipv6":
- self.log.info("Invalid IPv6 address from port check: %s" % res_ipv6["ip"])
- res_ipv6["opened"] = False
-
- self.ip_external_list = []
- for res_ip in [res_ipv4, res_ipv6]:
- if res_ip["ip"] and res_ip["ip"] not in self.ip_external_list:
- self.ip_external_list.append(res_ip["ip"])
- SiteManager.peer_blacklist.append((res_ip["ip"], self.port))
-
- self.log.info("Server port opened ipv4: %s, ipv6: %s" % (res_ipv4["opened"], res_ipv6["opened"]))
-
- res = {"ipv4": res_ipv4["opened"], "ipv6": res_ipv6["opened"]}
-
- # Add external IPs from local interfaces
- interface_ips = helper.getInterfaceIps("ipv4")
- if "ipv6" in self.supported_ip_types:
- interface_ips += helper.getInterfaceIps("ipv6")
- for ip in interface_ips:
- if not helper.isPrivateIp(ip) and ip not in self.ip_external_list:
- self.ip_external_list.append(ip)
- res[helper.getIpType(ip)] = True # We have opened port if we have external ip
- SiteManager.peer_blacklist.append((ip, self.port))
- self.log.debug("External ip found on interfaces: %s" % ip)
-
- self.port_opened.update(res)
-
- if self.ui_server:
- self.ui_server.updateWebsocket()
-
- return res
-
- # Check site file integrity
- def checkSite(self, site, check_files=False):
- if site.isServing():
- site.announce(mode="startup") # Announce site to tracker
- site.update(check_files=check_files) # Update site's content.json and download changed files
- site.sendMyHashfield()
- site.updateHashfield()
-
- # Check sites integrity
- @util.Noparallel()
- def checkSites(self, check_files=False, force_port_check=False):
- self.log.debug("Checking sites...")
- s = time.time()
- sites_checking = False
- if not self.port_opened or force_port_check: # Test and open port if not tested yet
- if len(self.sites) <= 2: # Don't wait for port opening on first startup
- sites_checking = True
- for address, site in list(self.sites.items()):
- gevent.spawn(self.checkSite, site, check_files)
-
- self.portCheck()
-
- if not self.port_opened["ipv4"]:
- self.tor_manager.startOnions()
-
- if not sites_checking:
- check_pool = gevent.pool.Pool(5)
- # Check sites integrity
- for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True):
- if not site.isServing():
- continue
- check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread
- time.sleep(2)
- if site.settings.get("modified", 0) < time.time() - 60 * 60 * 24: # Not so active site, wait some sec to finish
- check_thread.join(timeout=5)
- self.log.debug("Checksites done in %.3fs" % (time.time() - s))
-
- def cleanupSites(self):
- import gc
- startup = True
- time.sleep(5 * 60) # Sites already cleaned up on startup
- peers_protected = set([])
- while 1:
- # Site health check every 20 min
- self.log.debug(
- "Running site cleanup, connections: %s, internet: %s, protected peers: %s" %
- (len(self.connections), self.has_internet, len(peers_protected))
- )
-
- for address, site in list(self.sites.items()):
- if not site.isServing():
- continue
-
- if not startup:
- site.cleanupPeers(peers_protected)
-
- time.sleep(1) # Prevent too quick request
-
- peers_protected = set([])
- for address, site in list(self.sites.items()):
- if not site.isServing():
- continue
-
- if site.peers:
- with gevent.Timeout(10, exception=False):
- site.announcer.announcePex()
-
- # Last modification check failed
- if site.content_updated is False:
- site.update()
- elif site.bad_files:
- site.retryBadFiles()
-
- if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7:
- # Keep active connections if the site has been modified within 7 days
- connected_num = site.needConnections(check_site_on_reconnect=True)
-
- if connected_num < config.connected_limit: # This site has a small number of peers, protect them from closing
- peers_protected.update([peer.key for peer in site.getConnectedPeers()])
-
- time.sleep(1) # Prevent too quick request
-
- site = None
- gc.collect() # Explicit garbage collection
- startup = False
- time.sleep(60 * 20)
-
- def announceSite(self, site):
- site.announce(mode="update", pex=False)
- active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60
- if site.settings["own"] or active_site:
- # Check connections more frequently on own and active sites to speed-up first connections
- site.needConnections(check_site_on_reconnect=True)
- site.sendMyHashfield(3)
- site.updateHashfield(3)
-
- # Announce sites every 20 min
- def announceSites(self):
- time.sleep(5 * 60) # Sites already announced on startup
- while 1:
- config.loadTrackersFile()
- s = time.time()
- for address, site in list(self.sites.items()):
- if not site.isServing():
- continue
- gevent.spawn(self.announceSite, site).join(timeout=10)
- time.sleep(1)
- taken = time.time() - s
-
- # Query all trackers one-by-one, spread evenly over 20 minutes
- sleep = max(0, 60 * 20 / len(config.trackers) - taken)
-
- self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep))
- time.sleep(sleep)
-
- # Detect if the computer has come back from sleep
- def wakeupWatcher(self):
- last_time = time.time()
- last_my_ips = socket.gethostbyname_ex('')[2]
- while 1:
- time.sleep(30)
- is_time_changed = time.time() - max(self.last_request, last_time) > 60 * 3
- if is_time_changed:
- # If more than 3 minutes have passed, the computer was probably in sleep mode
- self.log.info(
- "Wakeup detected: time warp from %0.f to %0.f (%0.f sleep seconds), acting like startup..." %
- (last_time, time.time(), time.time() - last_time)
- )
-
- my_ips = socket.gethostbyname_ex('')[2]
- is_ip_changed = my_ips != last_my_ips
- if is_ip_changed:
- self.log.info("IP change detected from %s to %s" % (last_my_ips, my_ips))
-
- if is_time_changed or is_ip_changed:
- self.checkSites(check_files=False, force_port_check=True)
-
- last_time = time.time()
- last_my_ips = my_ips
-
- # Bind and start serving sites
- def start(self, check_sites=True):
- if self.stopping:
- return False
-
- ConnectionServer.start(self)
-
- try:
- self.stream_server.start()
- except Exception as err:
- self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err))
-
- self.sites = self.site_manager.list()
- if config.debug:
- # Auto reload FileRequest on change
- from Debug import DebugReloader
- DebugReloader.watcher.addCallback(self.reload)
-
- if check_sites: # Open port, Update sites, Check files integrity
- gevent.spawn(self.checkSites)
-
- thread_announce_sites = gevent.spawn(self.announceSites)
- thread_cleanup_sites = gevent.spawn(self.cleanupSites)
- thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)
-
- ConnectionServer.listen(self)
-
- self.log.debug("Stopped.")
-
- def stop(self):
- if self.running and self.portchecker.upnp_port_opened:
- self.log.debug('Closing port %d' % self.port)
- try:
- self.portchecker.portClose(self.port)
- self.log.info('Closed port via upnp.')
- except Exception as err:
- self.log.info("Failed at attempt to use upnp to close port: %s" % err)
-
- return ConnectionServer.stop(self)
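
One detail of announceSites worth spelling out is the pacing arithmetic: each pass through the site list sleeps for the remainder of its per-tracker time slice, so the full tracker list is cycled roughly once per 20 minutes. A worked sketch of that calculation (the tracker count and elapsed time below are example values):

# Spread tracker announces evenly over a 20-minute cycle
CYCLE_SECONDS = 60 * 20

def announce_sleep(num_trackers, taken):
    # Per-tracker time slice, minus the time the last announce pass already took
    return max(0, CYCLE_SECONDS / num_trackers - taken)

print(announce_sleep(9, 12.5))  # ~120.8s: 1200s / 9 trackers = 133.3s slice, minus 12.5s spent
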
diff --git a/src/File/__init__.py b/src/File/__init__.py
deleted file mode 100644
index 1eb602d6..00000000
--- a/src/File/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .FileServer import FileServer
-from .FileRequest import FileRequest
\ No newline at end of file
diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py
deleted file mode 100644
index 03cc1f47..00000000
--- a/src/Peer/Peer.py
+++ /dev/null
@@ -1,410 +0,0 @@
-import logging
-import time
-import sys
-import itertools
-import collections
-
-import gevent
-
-import io
-from Debug import Debug
-from Config import config
-from util import helper
-from .PeerHashfield import PeerHashfield
-from Plugin import PluginManager
-
-if config.use_tempfiles:
- import tempfile
-
-
- # Communicate with remote peers
-@PluginManager.acceptPlugins
-class Peer(object):
- __slots__ = (
- "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield",
- "time_added", "has_hashfield", "is_tracker_connection", "time_my_hashfield_sent", "last_ping", "reputation",
- "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time"
- )
-
- def __init__(self, ip, port, site=None, connection_server=None):
- self.ip = ip
- self.port = port
- self.site = site
- self.key = "%s:%s" % (ip, port)
-
- self.connection = None
- self.connection_server = connection_server
- self.has_hashfield = False # Lazy hashfield object not created yet
- self.time_hashfield = None # Last time the peer's hashfield was downloaded
- self.time_my_hashfield_sent = None # Last time my hashfield was sent to the peer
- self.time_found = time.time() # Time the peer was last found by a tracker
- self.time_response = None # Time of last successful response from peer
- self.time_added = time.time()
- self.last_ping = None # Last response time for ping
- self.is_tracker_connection = False # Tracker connection instead of normal peer
- self.reputation = 0 # More likely to connect if larger
- self.last_content_json_update = 0.0 # Modify date of last received content.json
-
- self.connection_error = 0 # Number of consecutive connection errors
- self.hash_failed = 0 # Number of bad files from peer
- self.download_bytes = 0 # Bytes downloaded
- self.download_time = 0 # Time spent to download
-
- def __getattr__(self, key):
- if key == "hashfield":
- self.has_hashfield = True
- self.hashfield = PeerHashfield()
- return self.hashfield
- else:
- # getattr(self, key) here would re-enter __getattr__ and recurse forever; raise instead
- raise AttributeError("%s has no attribute %r" % (type(self).__name__, key))
-
- def log(self, text):
- if not config.verbose:
- return # Only log in verbose mode
- if self.site:
- self.site.log.debug("%s:%s %s" % (self.ip, self.port, text))
- else:
- logging.debug("%s:%s %s" % (self.ip, self.port, text))
-
- # Connect to host
- def connect(self, connection=None):
- if self.reputation < -10:
- self.reputation = -10
- if self.reputation > 10:
- self.reputation = 10
-
- if self.connection:
- self.log("Getting connection (Closing %s)..." % self.connection)
- self.connection.close("Connection change")
- else:
- self.log("Getting connection (reputation: %s)..." % self.reputation)
-
- if connection: # Connection specified
- self.log("Assigning connection %s" % connection)
- self.connection = connection
- self.connection.sites += 1
- else: # Try to find from connection pool or create new connection
- self.connection = None
-
- try:
- if self.connection_server:
- connection_server = self.connection_server
- elif self.site:
- connection_server = self.site.connection_server
- else:
- import main
- connection_server = main.file_server
- self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection)
- self.reputation += 1
- self.connection.sites += 1
- except Exception as err:
- self.onConnectionError("Getting connection error")
- self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
- (Debug.formatException(err), self.connection_error, self.hash_failed))
- self.connection = None
- return self.connection
-
- # Check if we have connection to peer
- def findConnection(self):
- if self.connection and self.connection.connected: # We have connection to peer
- return self.connection
- else: # Try to find one among other sites' connections
- self.connection = self.site.connection_server.getConnection(self.ip, self.port, create=False, site=self.site)
- if self.connection:
- self.connection.sites += 1
- return self.connection
-
- def __str__(self):
- if self.site:
- return "Peer:%-12s of %s" % (self.ip, self.site.address_short)
- else:
- return "Peer:%-12s" % self.ip
-
- def __repr__(self):
- return "<%s>" % self.__str__()
-
- def packMyAddress(self):
- if self.ip.endswith(".onion"):
- return helper.packOnionAddress(self.ip, self.port)
- else:
- return helper.packAddress(self.ip, self.port)
-
- # Found a peer from a source
- def found(self, source="other"):
- if self.reputation < 5:
- if source == "tracker":
- if self.ip.endswith(".onion"):
- self.reputation += 1
- else:
- self.reputation += 2
- elif source == "local":
- self.reputation += 20
-
- if source in ("tracker", "local"):
- self.site.peers_recent.appendleft(self)
- self.time_found = time.time()
-
- # Send a command to peer and return response value
- def request(self, cmd, params={}, stream_to=None):
- if not self.connection or self.connection.closed:
- self.connect()
- if not self.connection:
- self.onConnectionError("Reconnect error")
- return None # Connection failed
-
- self.log("Send request: %s %s %s %s" % (params.get("site", ""), cmd, params.get("inner_path", ""), params.get("location", "")))
-
- for retry in range(1, 4): # Retry 3 times
- try:
- if not self.connection:
- raise Exception("No connection found")
- res = self.connection.request(cmd, params, stream_to)
- if not res:
- raise Exception("Send error")
- if "error" in res:
- self.log("%s error: %s" % (cmd, res["error"]))
- self.onConnectionError("Response error")
- break
- else: # Successful request, reset connection error num
- self.connection_error = 0
- self.time_response = time.time()
- if res:
- return res
- else:
- raise Exception("Invalid response: %s" % res)
- except Exception as err:
- if type(err).__name__ == "Notify": # Greenlet killed by worker
- self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
- break
- else:
- self.onConnectionError("Request error")
- self.log(
- "%s (connection_error: %s, hash_failed: %s, retry: %s)" %
- (Debug.formatException(err), self.connection_error, self.hash_failed, retry)
- )
- time.sleep(1 * retry)
- self.connect()
- return None # Failed after 3 retries
-
- # Get a file content from peer
- def getFile(self, site, inner_path, file_size=None, pos_from=0, pos_to=None, streaming=False):
- if file_size and file_size > 5 * 1024 * 1024:
- max_read_size = 1024 * 1024
- else:
- max_read_size = 512 * 1024
-
- if pos_to:
- read_bytes = min(max_read_size, pos_to - pos_from)
- else:
- read_bytes = max_read_size
-
- location = pos_from
-
- if config.use_tempfiles:
- buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
- else:
- buff = io.BytesIO()
-
- s = time.time()
- while True: # Read in smaller parts
- if config.stream_downloads or read_bytes > 256 * 1024 or streaming:
- res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}, stream_to=buff)
- if not res or "location" not in res: # Error
- return False
- else:
- self.log("Send: %s" % inner_path)
- res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size})
- if not res or "location" not in res: # Error
- return False
- self.log("Recv: %s" % inner_path)
- buff.write(res["body"])
- res["body"] = None # Save memory
-
- if res["location"] == res["size"] or res["location"] == pos_to: # End of file
- break
- else:
- location = res["location"]
- if pos_to:
- read_bytes = min(max_read_size, pos_to - location)
-
- if pos_to:
- recv = pos_to - pos_from
- else:
- recv = res["location"]
-
- self.download_bytes += recv
- self.download_time += (time.time() - s)
- if self.site:
- self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + recv
- self.log("Downloaded: %s, pos: %s, read_bytes: %s" % (inner_path, buff.tell(), read_bytes))
- buff.seek(0)
- return buff
-
- # Send a ping request
- def ping(self):
- response_time = None
- for retry in range(1, 3): # Retry 2 times
- s = time.time()
- with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception
- res = self.request("ping")
-
- if res and "body" in res and res["body"] == b"Pong!":
- response_time = time.time() - s
- break # All fine, exit from for loop
- # Timeout reached or bad response
- self.onConnectionError("Ping timeout")
- self.connect()
- time.sleep(1)
-
- if response_time:
- self.log("Ping: %.3f" % response_time)
- else:
- self.log("Ping failed")
- self.last_ping = response_time
- return response_time
-
- # Request peer exchange from peer
- def pex(self, site=None, need_num=5):
- if not site:
- site = self.site # If no site defined, request peers for this site
-
- # Give back 5 connectable peers
- packed_peers = helper.packPeers(site.getConnectablePeers(5, allow_private=False))
- request = {"site": site.address, "peers": packed_peers["ipv4"], "need": need_num}
- if packed_peers["onion"]:
- request["peers_onion"] = packed_peers["onion"]
- if packed_peers["ipv6"]:
- request["peers_ipv6"] = packed_peers["ipv6"]
- res = self.request("pex", request)
- if not res or "error" in res:
- return False
- added = 0
-
- # Remove unsupported peer types
- if "peers_ipv6" in res and self.connection and "ipv6" not in self.connection.server.supported_ip_types:
- del res["peers_ipv6"]
-
- if "peers_onion" in res and self.connection and "onion" not in self.connection.server.supported_ip_types:
- del res["peers_onion"]
-
- # Add IPv4 + IPv6
- for peer in itertools.chain(res.get("peers", []), res.get("peers_ipv6", [])):
- address = helper.unpackAddress(peer)
- if site.addPeer(*address, source="pex"):
- added += 1
-
- # Add Onion
- for peer in res.get("peers_onion", []):
- address = helper.unpackOnionAddress(peer)
- if site.addPeer(*address, source="pex"):
- added += 1
-
- if added:
- self.log("Added peers using pex: %s" % added)
-
- return added
-
- # List modified files since the date
- # Return: {inner_path: modification date,...}
- def listModified(self, since):
- return self.request("listModified", {"since": since, "site": self.site.address})
-
- def updateHashfield(self, force=False):
- # Don't update the hashfield again within 5 min
- if self.time_hashfield and time.time() - self.time_hashfield < 5 * 60 and not force:
- return False
-
- self.time_hashfield = time.time()
- res = self.request("getHashfield", {"site": self.site.address})
- if not res or "error" in res or "hashfield_raw" not in res:
- return False
- self.hashfield.replaceFromBytes(res["hashfield_raw"])
-
- return self.hashfield
-
- # Find peers for hashids
- # Return: {hash1: ["ip:port", "ip:port",...],...}
- def findHashIds(self, hash_ids):
- res = self.request("findHashIds", {"site": self.site.address, "hash_ids": hash_ids})
- if not res or "error" in res or type(res) is not dict:
- return False
-
- back = collections.defaultdict(list)
-
- for ip_type in ["ipv4", "ipv6", "onion"]:
- if ip_type == "ipv4":
- key = "peers"
- else:
- key = "peers_%s" % ip_type
- for hash, peers in list(res.get(key, {}).items())[0:30]:
- if ip_type == "onion":
- unpacker_func = helper.unpackOnionAddress
- else:
- unpacker_func = helper.unpackAddress
-
- back[hash] += list(map(unpacker_func, peers))
-
- for hash in res.get("my", []):
- if self.connection:
- back[hash].append((self.connection.ip, self.connection.port))
- else:
- back[hash].append((self.ip, self.port))
-
- return back
-
- # Send my hashfield to peer
- # Return: True if sent
- def sendMyHashfield(self):
- if self.connection and self.connection.handshake.get("rev", 0) < 510:
- return False # Not supported
- if self.time_my_hashfield_sent and self.site.content_manager.hashfield.time_changed <= self.time_my_hashfield_sent:
- return False # Peer already has the latest hashfield
-
- res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tobytes()})
- if not res or "error" in res:
- return False
- else:
- self.time_my_hashfield_sent = time.time()
- return True
-
- def publish(self, address, inner_path, body, modified, diffs=[]):
- if len(body) > 10 * 1024 and self.connection and self.connection.handshake.get("rev", 0) >= 4095:
- # To save bandwidth we don't push big content.json files to peers
- body = b""
-
- return self.request("update", {
- "site": address,
- "inner_path": inner_path,
- "body": body,
- "modified": modified,
- "diffs": diffs
- })
-
- # Stop and remove from site
- def remove(self, reason="Removing"):
- self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
- if self.site and self.key in self.site.peers:
- del(self.site.peers[self.key])
-
- if self.site and self in self.site.peers_recent:
- self.site.peers_recent.remove(self)
-
- if self.connection:
- self.connection.close(reason)
-
- # - EVENTS -
-
- # On connection error
- def onConnectionError(self, reason="Unknown"):
- self.connection_error += 1
- if self.site and len(self.site.peers) > 200:
- limit = 3
- else:
- limit = 6
- self.reputation -= 1
- if self.connection_error >= limit: # Dead peer
- self.remove("Peer connection: %s" % reason)
-
- # Done working with peer
- def onWorkerDone(self):
- pass
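
Peer.getFile above is a chunked-transfer loop: request a window of bytes, append it to a buffer, and advance `location` until the reported position reaches the file size. A minimal sketch of the same loop with a stubbed request function (the stub only stands in for the real getFile/streamFile round-trip):

import io

def download(request, read_bytes=3):
    # request(location, read_bytes) -> {"body": bytes, "location": int, "size": int}
    buff = io.BytesIO()
    location = 0
    while True:
        res = request(location, read_bytes)
        buff.write(res["body"])
        if res["location"] == res["size"]:  # End of file reached
            break
        location = res["location"]
    buff.seek(0)
    return buff

# Usage with a fake peer that serves 3 bytes per request
data = b"hello world"
fake = lambda loc, n: {"body": data[loc:loc + n], "location": min(loc + n, len(data)), "size": len(data)}
assert download(fake).read() == data
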
diff --git a/src/Peer/PeerHashfield.py b/src/Peer/PeerHashfield.py
deleted file mode 100644
index fdd414c8..00000000
--- a/src/Peer/PeerHashfield.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import array
-import time
-
-
-class PeerHashfield(object):
- __slots__ = ("storage", "time_changed", "append", "remove", "tobytes", "frombytes", "__len__", "__iter__")
- def __init__(self):
- self.storage = self.createStorage()
- self.time_changed = time.time()
-
- def createStorage(self):
- storage = array.array("H")
- self.append = storage.append
- self.remove = storage.remove
- self.tobytes = storage.tobytes
- self.frombytes = storage.frombytes
- self.__len__ = storage.__len__
- self.__iter__ = storage.__iter__
- return storage
-
- def appendHash(self, hash):
- hash_id = int(hash[0:4], 16)
- if hash_id not in self.storage:
- self.storage.append(hash_id)
- self.time_changed = time.time()
- return True
- else:
- return False
-
- def appendHashId(self, hash_id):
- if hash_id not in self.storage:
- self.storage.append(hash_id)
- self.time_changed = time.time()
- return True
- else:
- return False
-
- def removeHash(self, hash):
- hash_id = int(hash[0:4], 16)
- if hash_id in self.storage:
- self.storage.remove(hash_id)
- self.time_changed = time.time()
- return True
- else:
- return False
-
- def removeHashId(self, hash_id):
- if hash_id in self.storage:
- self.storage.remove(hash_id)
- self.time_changed = time.time()
- return True
- else:
- return False
-
- def getHashId(self, hash):
- return int(hash[0:4], 16)
-
- def hasHash(self, hash):
- return int(hash[0:4], 16) in self.storage
-
- def replaceFromBytes(self, hashfield_raw):
- self.storage = self.createStorage()
- self.storage.frombytes(hashfield_raw)
- self.time_changed = time.time()
-
-if __name__ == "__main__":
- field = PeerHashfield()
- s = time.time()
- for i in range(10000):
- field.appendHashId(i)
- print(time.time()-s)
- s = time.time()
- for i in range(10000):
- field.hasHash("AABB")
- print(time.time()-s)
\ No newline at end of file
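
The space saving in PeerHashfield comes from truncation: only the first four hex characters of each optional file's hash are kept, so every file collapses into a 16-bit id that fits one array('H') slot, 2 bytes instead of a full digest. The mapping is lossy, distinct hashes can collide on an id, which is acceptable because the field is only a hint about which peer probably has a file. A short demonstration:

import array

digest = "aabbccdd" + "00" * 28   # Example hex digest
hash_id = int(digest[0:4], 16)    # First two bytes -> 0..65535
print(hash_id)                    # 43707 (0xaabb)

storage = array.array("H", [hash_id])
print(len(storage.tobytes()))     # 2 bytes per tracked hash
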
diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py
deleted file mode 100644
index 3c4daecf..00000000
--- a/src/Peer/PeerPortchecker.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import logging
-import urllib.request
-import urllib.parse
-import re
-import time
-
-from Debug import Debug
-from util import UpnpPunch
-
-
-class PeerPortchecker(object):
- checker_functions = {
- "ipv4": ["checkIpfingerprints", "checkCanyouseeme"],
- "ipv6": ["checkMyaddr", "checkIpv6scanner"]
- }
- def __init__(self, file_server):
- self.log = logging.getLogger("PeerPortchecker")
- self.upnp_port_opened = False
- self.file_server = file_server
-
- def requestUrl(self, url, post_data=None):
- if type(post_data) is dict:
- post_data = urllib.parse.urlencode(post_data).encode("utf8")
- req = urllib.request.Request(url, post_data)
- req.add_header("Referer", url)
- req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11")
- req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
- return urllib.request.urlopen(req, timeout=20.0)
-
- def portOpen(self, port):
- self.log.info("Trying to open port using UpnpPunch...")
-
- try:
- UpnpPunch.ask_to_open_port(port, 'ZeroNet', retries=3, protos=["TCP"])
- self.upnp_port_opened = True
- except Exception as err:
- self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err))
- return False
-
- return True
-
- def portClose(self, port):
- return UpnpPunch.ask_to_close_port(port, protos=["TCP"])
-
- def portCheck(self, port, ip_type="ipv4"):
- checker_functions = self.checker_functions[ip_type]
-
- for func_name in checker_functions:
- func = getattr(self, func_name)
- s = time.time()
- try:
- res = func(port)
- if res:
- self.log.info(
- "Checked port %s (%s) using %s result: %s in %.3fs" %
- (port, ip_type, func_name, res, time.time() - s)
- )
- time.sleep(0.1)
- if res["opened"] and not self.file_server.had_external_incoming:
- res["opened"] = False
- self.log.warning("Port %s:%s looks opened, but no incoming connection" % (res["ip"], port))
- break
- except Exception as err:
- self.log.warning(
- "%s check error: %s in %.3fs" %
- (func_name, Debug.formatException(err), time.time() - s)
- )
- res = {"ip": None, "opened": False}
-
- return res
-
- def checkCanyouseeme(self, port):
- data = urllib.request.urlopen("https://www.canyouseeme.org/", b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
-
- message = re.match(r'.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
- message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip HTML tags
-
- match = re.match(r".*service on (.*?) on", message)
- if match:
- ip = match.group(1)
- else:
- raise Exception("Invalid response: %s" % message)
-
- if "Success" in message:
- return {"ip": ip, "opened": True}
- elif "Error" in message:
- return {"ip": ip, "opened": False}
- else:
- raise Exception("Invalid response: %s" % message)
-
- def checkIpfingerprints(self, port):
- data = self.requestUrl("https://www.ipfingerprints.com/portscan.php").read().decode("utf8")
- ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1)
-
- post_data = {
- "remoteHost": ip, "start_port": port, "end_port": port,
- "normalScan": "Yes", "scan_type": "connect2", "ping_type": "none"
- }
- message = self.requestUrl("https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data).read().decode("utf8")
-
- if "open" in message:
- return {"ip": ip, "opened": True}
- elif "filtered" in message or "closed" in message:
- return {"ip": ip, "opened": False}
- else:
- raise Exception("Invalid response: %s" % message)
-
- def checkMyaddr(self, port):
- url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php"
-
- data = self.requestUrl(url).read().decode("utf8")
-
- ip = re.match(r'.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1)
-
- post_data = {"addr": ip, "ports_selected": "", "ports_list": port}
- data = self.requestUrl(url, post_data).read().decode("utf8")
-
- message = re.match(r".*", data, re.DOTALL).group(1)
-
- if "ok.png" in message:
- return {"ip": ip, "opened": True}
- elif "fail.png" in message:
- return {"ip": ip, "opened": False}
- else:
- raise Exception("Invalid response: %s" % message)
-
- def checkIpv6scanner(self, port):
- url = "http://www.ipv6scanner.com/cgi-bin/main.py"
-
- data = self.requestUrl(url).read().decode("utf8")
-
- ip = re.match(r'.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1)
-
- post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"}
- data = self.requestUrl(url, post_data).read().decode("utf8")
-
- message = re.match(r".*", data, re.DOTALL).group(1)
- message_text = re.sub("<.*?>", " ", message.replace(" ", " ").replace(" ", " ").strip()) # Strip http tags
-
- if "OPEN" in message_text:
- return {"ip": ip, "opened": True}
- elif "CLOSED" in message_text or "FILTERED" in message_text:
- return {"ip": ip, "opened": False}
- else:
- raise Exception("Invalid response: %s" % message_text)
-
- def checkPortchecker(self, port): # Not working: Forbidden
- data = self.requestUrl("https://portchecker.co").read().decode("utf8")
- csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1)
-
- data = self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf}).read().decode("utf8")
- message = re.match(r'.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
- message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip HTML tags
-
- match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL)
- if match:
- ip = match.group(1)
- else:
- raise Exception("Invalid response: %s" % message)
-
- if "open" in message:
- return {"ip": ip, "opened": True}
- elif "closed" in message:
- return {"ip": ip, "opened": False}
- else:
- raise Exception("Invalid response: %s" % message)
-
- def checkSubnetonline(self, port): # Not working: Invalid response
- url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php"
-
- data = self.requestUrl(url).read().decode("utf8")
-
- ip = re.match(r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1)
- token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)
-
- post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."}
- data = self.requestUrl(url, post_data).read().decode("utf8")
-
- print(post_data, data)
-
- message = re.match(r".*(.*?)
", data, re.DOTALL).group(1)
- message = re.sub(r"<.*?>", "", message.replace(" ", " ").replace(" ", " ").strip()) # Strip http tags
-
- if "online" in message:
- return {"ip": ip, "opened": True}
- elif "closed" in message:
- return {"ip": ip, "opened": False}
- else:
- raise Exception("Invalid response: %s" % message)
diff --git a/src/Peer/__init__.py b/src/Peer/__init__.py
deleted file mode 100644
index e73c58c5..00000000
--- a/src/Peer/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .Peer import Peer
-from .PeerHashfield import PeerHashfield
diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py
deleted file mode 100644
index dbafa98f..00000000
--- a/src/Plugin/PluginManager.py
+++ /dev/null
@@ -1,287 +0,0 @@
-import logging
-import os
-import sys
-import shutil
-import time
-from collections import defaultdict
-
-import importlib
-import json
-
-from Debug import Debug
-from Config import config
-import plugins
-
-
-class PluginManager:
- def __init__(self):
- self.log = logging.getLogger("PluginManager")
- self.path_plugins = os.path.abspath(os.path.dirname(plugins.__file__))
- self.path_installed_plugins = config.data_dir + "/__plugins__"
- self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class)
- self.subclass_order = {} # Record the load order of the plugins, to keep it after reload
- self.pluggable = {}
- self.plugin_names = [] # Loaded plugin names
- self.plugins_updated = {} # List of updated plugins since restart
- self.plugins_rev = {} # Installed plugins revision numbers
- self.after_load = [] # Execute functions after loaded plugins
- self.function_flags = {} # Flag function for permissions
- self.reloading = False
- self.config_path = config.data_dir + "/plugins.json"
- self.loadConfig()
-
- self.config.setdefault("builtin", {})
-
- sys.path.append(os.path.join(os.getcwd(), self.path_plugins))
- self.migratePlugins()
-
- if config.debug: # Auto reload Plugins on file change
- from Debug import DebugReloader
- DebugReloader.watcher.addCallback(self.reloadPlugins)
-
- def loadConfig(self):
- if os.path.isfile(self.config_path):
- try:
- self.config = json.load(open(self.config_path, encoding="utf8"))
- except Exception as err:
- self.log.error("Error loading %s: %s" % (self.config_path, err))
- self.config = {}
- else:
- self.config = {}
-
- def saveConfig(self):
- f = open(self.config_path, "w", encoding="utf8")
- json.dump(self.config, f, ensure_ascii=False, sort_keys=True, indent=2)
-
- def migratePlugins(self):
- for dir_name in os.listdir(self.path_plugins):
- if dir_name == "Mute":
- self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name)
- shutil.rmtree("%s/%s" % (self.path_plugins, dir_name))
-
- # -- Load / Unload --
-
- def listPlugins(self, list_disabled=False):
- plugins = []
- for dir_name in sorted(os.listdir(self.path_plugins)):
- dir_path = os.path.join(self.path_plugins, dir_name)
- plugin_name = dir_name.replace("disabled-", "")
- if dir_name.startswith("disabled"):
- is_enabled = False
- else:
- is_enabled = True
-
- plugin_config = self.config["builtin"].get(plugin_name, {})
- if "enabled" in plugin_config:
- is_enabled = plugin_config["enabled"]
-
- if dir_name == "__pycache__" or not os.path.isdir(dir_path):
- continue # skip
- if dir_name.startswith("Debug") and not config.debug:
- continue # Plugins named Debug* are only loaded in debug mode
- if not is_enabled and not list_disabled:
- continue # Don't load if disabled
-
- plugin = {}
- plugin["source"] = "builtin"
- plugin["name"] = plugin_name
- plugin["dir_name"] = dir_name
- plugin["dir_path"] = dir_path
- plugin["inner_path"] = plugin_name
- plugin["enabled"] = is_enabled
- plugin["rev"] = config.rev
- plugin["loaded"] = plugin_name in self.plugin_names
- plugins.append(plugin)
-
- plugins += self.listInstalledPlugins(list_disabled)
- return plugins
-
- def listInstalledPlugins(self, list_disabled=False):
- plugins = []
-
- for address, site_plugins in sorted(self.config.items()):
- if address == "builtin":
- continue
- for plugin_inner_path, plugin_config in sorted(site_plugins.items()):
- is_enabled = plugin_config.get("enabled", False)
- if not is_enabled and not list_disabled:
- continue
- plugin_name = os.path.basename(plugin_inner_path)
-
- dir_path = "%s/%s/%s" % (self.path_installed_plugins, address, plugin_inner_path)
-
- plugin = {}
- plugin["source"] = address
- plugin["name"] = plugin_name
- plugin["dir_name"] = plugin_name
- plugin["dir_path"] = dir_path
- plugin["inner_path"] = plugin_inner_path
- plugin["enabled"] = is_enabled
- plugin["rev"] = plugin_config.get("rev", 0)
- plugin["loaded"] = plugin_name in self.plugin_names
- plugins.append(plugin)
-
- return plugins
-
- # Load all plugins
- def loadPlugins(self):
- all_loaded = True
- s = time.time()
- for plugin in self.listPlugins():
- self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"]))
- if plugin["source"] != "builtin":
- self.plugins_rev[plugin["name"]] = plugin["rev"]
- site_plugin_dir = os.path.dirname(plugin["dir_path"])
- if site_plugin_dir not in sys.path:
- sys.path.append(site_plugin_dir)
- try:
- sys.modules[plugin["name"]] = __import__(plugin["dir_name"])
- except Exception as err:
- self.log.error("Plugin %s load error: %s" % (plugin["name"], Debug.formatException(err)))
- all_loaded = False
- if plugin["name"] not in self.plugin_names:
- self.plugin_names.append(plugin["name"])
-
- self.log.debug("Plugins loaded in %.3fs" % (time.time() - s))
- for func in self.after_load:
- func()
- return all_loaded
-
- # Reload all plugins
- def reloadPlugins(self):
- self.reloading = True
- self.after_load = []
- self.plugins_before = self.plugins
- self.plugins = defaultdict(list) # Reset registered plugins
- for module_name, module in list(sys.modules.items()):
- if not module or not getattr(module, "__file__", None):
- continue
- if self.path_plugins not in module.__file__ and self.path_installed_plugins not in module.__file__:
- continue
-
- if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled
- # Re-add non-reloadable plugins
- for class_name, classes in self.plugins_before.items():
- for c in classes:
- if c.__module__ != module.__name__:
- continue
- self.plugins[class_name].append(c)
- else:
- try:
- importlib.reload(module)
- except Exception as err:
- self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))
-
- self.loadPlugins() # Load new plugins
-
- # Change current classes in memory
- import gc
- patched = {}
- for class_name, classes in self.plugins.items():
- classes = classes[:] # Copy the current plugins
- classes.reverse()
- base_class = self.pluggable[class_name] # Original class
- classes.append(base_class) # Add the base class itself to the end of the inheritance chain
- plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
- for obj in gc.get_objects():
- if type(obj).__name__ == class_name:
- obj.__class__ = plugined_class
- patched[class_name] = patched.get(class_name, 0) + 1
- self.log.debug("Patched objects: %s" % patched)
-
- # Change classes in modules
- patched = {}
- for class_name, classes in self.plugins.items():
- for module_name, module in list(sys.modules.items()):
- if class_name in dir(module):
- if "__class__" not in dir(getattr(module, class_name)): # Not a class
- continue
- base_class = self.pluggable[class_name]
- classes = self.plugins[class_name][:]
- classes.reverse()
- classes.append(base_class)
- plugined_class = type(class_name, tuple(classes), dict())
- setattr(module, class_name, plugined_class)
- patched[class_name] = patched.get(class_name, 0) + 1
-
- self.log.debug("Patched modules: %s" % patched)
- self.reloading = False
-
-
-plugin_manager = PluginManager() # Singleton
-
-# -- Decorators --
-
-# Accept plugin to class decorator
-
-
-def acceptPlugins(base_class):
- class_name = base_class.__name__
- plugin_manager.pluggable[class_name] = base_class
- if class_name in plugin_manager.plugins: # Has plugins
- classes = plugin_manager.plugins[class_name][:] # Copy the current plugins
-
- # Restore the subclass order after reload
- if class_name in plugin_manager.subclass_order:
- classes = sorted(
- classes,
- key=lambda key:
- plugin_manager.subclass_order[class_name].index(str(key))
- if str(key) in plugin_manager.subclass_order[class_name]
- else 9999
- )
- plugin_manager.subclass_order[class_name] = list(map(str, classes))
-
- classes.reverse()
- classes.append(base_class) # Add the base class itself to the end of the inheritance chain
- plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
- plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
- else: # No plugins just use the original
- plugined_class = base_class
- return plugined_class
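-
-# Illustrative sketch: the composition above is plain multiple inheritance
-# built with type(). Plugins come first in the MRO, so their methods override
-# the base class and super() walks down the plugin chain. With a hypothetical
-# LoggingPlugin mixin:
-#
-#   class LoggingPlugin(object):
-#       def route(self, path):
-#           print("Routing %s" % path)
-#           return super().route(path)
-#
-#   class Request(object):
-#       def route(self, path):
-#           return "Routed to %s" % path
-#
-#   PluginedRequest = type("Request", (LoggingPlugin, Request), dict())
-#   PluginedRequest().route("MainPage") # Prints, then returns "Routed to MainPage"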
-
-
-# Register plugin to class name decorator
-def registerTo(class_name):
- if config.debug and not plugin_manager.reloading:
- import gc
- for obj in gc.get_objects():
- if type(obj).__name__ == class_name:
- raise Exception("Class %s instances already present in memory" % class_name)
- break
-
- plugin_manager.log.debug("New plugin registered to: %s" % class_name)
- if class_name not in plugin_manager.plugins:
- plugin_manager.plugins[class_name] = []
-
- def classDecorator(self):
- plugin_manager.plugins[class_name].append(self)
- return self
- return classDecorator
-
-
-def afterLoad(func):
- plugin_manager.after_load.append(func)
- return func
-
-
-# - Example usage -
-
-if __name__ == "__main__":
- @registerTo("Request")
- class RequestPlugin(object):
-
- def actionMainPage(self, path):
- return "Hello MainPage!"
-
- @acceptPlugins
- class Request(object):
-
- def route(self, path):
- func = getattr(self, "action" + path, None)
- if func:
- return func(path)
- else:
- return "Can't route to", path
-
- print(Request().route("MainPage"))
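- # Expected output: Hello MainPage!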
diff --git a/src/Plugin/__init__.py b/src/Plugin/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/src/Site/Site.py b/src/Site/Site.py
deleted file mode 100644
index 354fe9c0..00000000
--- a/src/Site/Site.py
+++ /dev/null
@@ -1,1147 +0,0 @@
-import os
-import json
-import logging
-import re
-import time
-import random
-import sys
-import hashlib
-import collections
-import base64
-
-import gevent
-import gevent.pool
-
-import util
-from Config import config
-from Peer import Peer
-from Worker import WorkerManager
-from Debug import Debug
-from Content import ContentManager
-from .SiteStorage import SiteStorage
-from Crypt import CryptHash
-from util import helper
-from util import Diff
-from util import GreenletManager
-from Plugin import PluginManager
-from File import FileServer
-from .SiteAnnouncer import SiteAnnouncer
-from . import SiteManager
-
-
-@PluginManager.acceptPlugins
-class Site(object):
-
- def __init__(self, address, allow_create=True, settings=None):
- self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure it's a valid address
- self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest()
- self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest()
- self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging
- self.log = logging.getLogger("Site:%s" % self.address_short)
- self.addEventListeners()
-
- self.content = None # Load content.json
- self.peers = {} # Key: ip:port, Value: Peer.Peer
- self.peers_recent = collections.deque(maxlen=150)
- self.peer_blacklist = SiteManager.peer_blacklist # Ignore these peers (e.g. myself)
- self.greenlet_manager = GreenletManager.GreenletManager() # Running greenlets
- self.worker_manager = WorkerManager(self) # Handle site download from other peers
- self.bad_files = {} # Files that failed the SHA check and need to be redownloaded, e.g. {"inner.content": 1} (key: file, value: retry count)
- self.content_updated = None # content.json update time
- self.notifications = [] # Pending notifications displayed once on page load [error|ok|info, message, timeout]
- self.page_requested = False # Page viewed in browser
- self.websockets = [] # Active site websocket connections
-
- self.connection_server = None
- self.loadSettings(settings) # Load settings from sites.json
- self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files
- self.content_manager = ContentManager(self)
- self.content_manager.loadContents() # Load content.json files
- if "main" in sys.modules: # import main has side-effects, breaks tests
- import main
- if "file_server" in dir(main): # Use global file server by default if possible
- self.connection_server = main.file_server
- else:
- main.file_server = FileServer()
- self.connection_server = main.file_server
- else:
- self.connection_server = FileServer()
-
- self.announcer = SiteAnnouncer(self) # Announce and get peer list from other nodes
-
- if not self.settings.get("wrapper_key"): # To auth websocket permissions
- self.settings["wrapper_key"] = CryptHash.random()
- self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"])
-
- if not self.settings.get("ajax_key"): # To auth websocket permissions
- self.settings["ajax_key"] = CryptHash.random()
- self.log.debug("New ajax key: %s" % self.settings["ajax_key"])
-
- def __str__(self):
- return "Site %s" % self.address_short
-
- def __repr__(self):
- return "<%s>" % self.__str__()
-
- # Load site settings from data/sites.json
- def loadSettings(self, settings=None):
- if not settings:
- settings = json.load(open("%s/sites.json" % config.data_dir)).get(self.address)
- if settings:
- self.settings = settings
- if "cache" not in settings:
- settings["cache"] = {}
- if "size_files_optional" not in settings:
- settings["size_optional"] = 0
- if "optional_downloaded" not in settings:
- settings["optional_downloaded"] = 0
- if "downloaded" not in settings:
- settings["downloaded"] = settings.get("added")
- self.bad_files = settings["cache"].get("bad_files", {})
- settings["cache"]["bad_files"] = {}
- # Cap the counter at 20, so every bad file gets at least 10 more tries after a restart (give-up is at 30)
- for inner_path in self.bad_files:
- self.bad_files[inner_path] = min(self.bad_files[inner_path], 20)
- else:
- self.settings = {
- "own": False, "serving": True, "permissions": [], "cache": {"bad_files": {}}, "size_files_optional": 0,
- "added": int(time.time()), "downloaded": None, "optional_downloaded": 0, "size_optional": 0
- } # Default
- if config.download_optional == "auto":
- self.settings["autodownloadoptional"] = True
-
- # Add admin permissions to homepage
- if self.address in (config.homepage, config.updatesite) and "ADMIN" not in self.settings["permissions"]:
- self.settings["permissions"].append("ADMIN")
-
- return
-
- # Save site settings to data/sites.json
- def saveSettings(self):
- if not SiteManager.site_manager.sites:
- SiteManager.site_manager.sites = {}
- if not SiteManager.site_manager.sites.get(self.address):
- SiteManager.site_manager.sites[self.address] = self
- SiteManager.site_manager.load(False)
- SiteManager.site_manager.saveDelayed()
-
- def isServing(self):
- if config.offline:
- return False
- else:
- return self.settings["serving"]
-
- def getSettingsCache(self):
- back = {}
- back["bad_files"] = self.bad_files
- back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii")
- return back
-
- # Max site size in MB
- def getSizeLimit(self):
- return self.settings.get("size_limit", int(config.size_limit))
-
- # Next size limit based on current size
- def getNextSizeLimit(self):
- size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000]
- size = self.settings.get("size", 0)
- for size_limit in size_limits:
- if size * 1.2 < size_limit * 1024 * 1024:
- return size_limit
- return 999999
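-
- # Worked example: a site currently using 18 MB needs 18 * 1.2 = 21.6 MB of
- # headroom, which is over the 20 MB step but under 50 MB, so the next size
- # limit offered is 50.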
-
- def isAddedRecently(self):
- return time.time() - self.settings.get("added", 0) < 60 * 60 * 24
-
- # Download all files listed in a content.json
- def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}):
- s = time.time()
- if config.verbose:
- self.log.debug(
- "DownloadContent %s: Started. (download_files: %s, check_modifications: %s, diffs: %s)..." %
- (inner_path, download_files, check_modifications, diffs.keys())
- )
-
- if not inner_path.endswith("content.json"):
- return False
-
- found = self.needFile(inner_path, update=self.bad_files.get(inner_path))
- content_inner_dir = helper.getDirname(inner_path)
- if not found:
- self.log.debug("DownloadContent %s: Download failed, check_modifications: %s" % (inner_path, check_modifications))
- if check_modifications: # Download failed, but still check modifications in case it succeeds later
- self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications")
- return False # Could not download content.json
-
- if config.verbose:
- self.log.debug("DownloadContent got %s" % inner_path)
- sub_s = time.time()
-
- changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False)
-
- if config.verbose:
- self.log.debug("DownloadContent %s: loadContent done in %.3fs" % (inner_path, time.time() - sub_s))
-
- if inner_path == "content.json":
- self.saveSettings()
-
- if peer: # Update last received update from peer to prevent re-sending the same update to it
- peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"]
-
- # Verify size limit
- if inner_path == "content.json":
- site_size_limit = self.getSizeLimit() * 1024 * 1024
- content = self.content_manager.contents[inner_path]
- content_size = len(json.dumps(content, indent=1)) # Size of new content
- content_size += sum([file["size"] for file in list(content.get("files", {}).values()) if file["size"] >= 0])
- if site_size_limit < content_size:
- # Not enough space, don't download anything
- self.log.debug("DownloadContent Size limit reached (site too big, please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024))
- return False
-
- # Start download files
- file_threads = []
- if download_files:
- for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()):
- file_inner_path = content_inner_dir + file_relative_path
-
- # Try to diff first
- diff_success = False
- diff_actions = diffs.get(file_relative_path)
- if diff_actions and self.bad_files.get(file_inner_path):
- try:
- s = time.time()
- new_file = Diff.patch(self.storage.open(file_inner_path, "rb"), diff_actions)
- new_file.seek(0)
- time_diff = time.time() - s
-
- s = time.time()
- diff_success = self.content_manager.verifyFile(file_inner_path, new_file)
- time_verify = time.time() - s
-
- if diff_success:
- s = time.time()
- new_file.seek(0)
- self.storage.write(file_inner_path, new_file)
- time_write = time.time() - s
-
- s = time.time()
- self.onFileDone(file_inner_path)
- time_on_done = time.time() - s
-
- self.log.debug(
- "DownloadContent Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" %
- (file_inner_path, time_diff, time_verify, time_write, time_on_done)
- )
- except Exception as err:
- self.log.debug("DownloadContent Failed to patch %s: %s" % (file_inner_path, err))
- diff_success = False
-
- if not diff_success:
- # Start the download without waiting for it to finish, return the event
- res = self.needFile(file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer)
- if res is not True and res is not False: # Need downloading and file is allowed
- file_threads.append(res) # Append evt
-
- # Optionals files
- if inner_path == "content.json":
- gevent.spawn(self.updateHashfield)
-
- for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()):
- file_inner_path = content_inner_dir + file_relative_path
- if file_inner_path not in changed and not self.bad_files.get(file_inner_path):
- continue
- if not self.isDownloadable(file_inner_path):
- continue
- # Start the download without waiting for it to finish, return the event
- res = self.pooledNeedFile(
- file_inner_path, blocking=False, update=self.bad_files.get(file_inner_path), peer=peer
- )
- if res is not True and res is not False: # Need downloading and file is allowed
- file_threads.append(res) # Append evt
-
- # Wait for includes download
- include_threads = []
- for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()):
- file_inner_path = content_inner_dir + file_relative_path
- include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer)
- include_threads.append(include_thread)
-
- if config.verbose:
- self.log.debug("DownloadContent %s: Downloading %s includes..." % (inner_path, len(include_threads)))
- gevent.joinall(include_threads)
- if config.verbose:
- self.log.debug("DownloadContent %s: Includes download ended" % inner_path)
-
- if check_modifications: # Check if every file is up-to-date
- self.checkModifications(0)
-
- if config.verbose:
- self.log.debug("DownloadContent %s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed)))
- gevent.joinall(file_threads)
- if config.verbose:
- self.log.debug("DownloadContent %s: ended in %.3fs (tasks left: %s)" % (
- inner_path, time.time() - s, len(self.worker_manager.tasks)
- ))
-
- return True
-
- # Return bad files with fewer than 3 retries
- def getReachableBadFiles(self):
- if not self.bad_files:
- return False
- return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3]
-
- # Retry download bad files
- def retryBadFiles(self, force=False):
- self.checkBadFiles()
-
- self.log.debug("Retry %s bad files" % len(self.bad_files))
- content_inner_paths = []
- file_inner_paths = []
-
- for bad_file, tries in list(self.bad_files.items()):
- if force or random.randint(0, min(40, tries)) < 4: # More tries = lower chance of retrying in each 15min cycle (bottoms out at 4/41, ~10%)
- if bad_file.endswith("content.json"):
- content_inner_paths.append(bad_file)
- else:
- file_inner_paths.append(bad_file)
-
- if content_inner_paths:
- self.pooledDownloadContent(content_inner_paths, only_if_bad=True)
-
- if file_inner_paths:
- self.pooledDownloadFile(file_inner_paths, only_if_bad=True)
-
- def checkBadFiles(self):
- for bad_file in list(self.bad_files.keys()):
- file_info = self.content_manager.getFileInfo(bad_file)
- if bad_file.endswith("content.json"):
- if file_info is False and bad_file != "content.json":
- del self.bad_files[bad_file]
- self.log.debug("No info for file: %s, removing from bad_files" % bad_file)
- else:
- if file_info is False or not file_info.get("size"):
- del self.bad_files[bad_file]
- self.log.debug("No info or size for file: %s, removing from bad_files" % bad_file)
-
- # Download all files of the site
- @util.Noparallel(blocking=False)
- def download(self, check_size=False, blind_includes=False, retry_bad_files=True):
- if not self.connection_server:
- self.log.debug("No connection server found, skipping download")
- return False
-
- s = time.time()
- self.log.debug(
- "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s, isAddedRecently: %s" %
- (self.bad_files, check_size, blind_includes, self.isAddedRecently())
- )
-
- if self.isAddedRecently():
- gevent.spawn(self.announce, mode="start", force=True)
- else:
- gevent.spawn(self.announce, mode="update")
-
- if check_size: # Check the size first
- valid = self.downloadContent("content.json", download_files=False) # Just download content.json files
- if not valid:
- return False # Can't download content.json files, or the site doesn't fit the size limit
-
- # Download everything
- valid = self.downloadContent("content.json", check_modifications=blind_includes)
-
- if retry_bad_files:
- self.onComplete.once(lambda: self.retryBadFiles(force=True))
- self.log.debug("Download done in %.3fs" % (time.time() - s))
-
- return valid
-
- def pooledDownloadContent(self, inner_paths, pool_size=100, only_if_bad=False):
- self.log.debug("New downloadContent pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad))
- self.worker_manager.started_task_num += len(inner_paths)
- pool = gevent.pool.Pool(pool_size)
- num_skipped = 0
- site_size_limit = self.getSizeLimit() * 1024 * 1024
- for inner_path in inner_paths:
- if not only_if_bad or inner_path in self.bad_files:
- pool.spawn(self.downloadContent, inner_path)
- else:
- num_skipped += 1
- self.worker_manager.started_task_num -= 1
- if self.settings["size"] > site_size_limit * 0.95:
- self.log.warning("Site size limit almost reached, aborting downloadContent pool")
- for aborted_inner_path in inner_paths:
- if aborted_inner_path in self.bad_files:
- del self.bad_files[aborted_inner_path]
- self.worker_manager.removeSolvedFileTasks(mark_as_good=False)
- break
- pool.join()
- self.log.debug("Ended downloadContent pool len: %s, skipped: %s" % (len(inner_paths), num_skipped))
-
- def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False):
- self.log.debug("New downloadFile pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad))
- self.worker_manager.started_task_num += len(inner_paths)
- pool = gevent.pool.Pool(pool_size)
- num_skipped = 0
- for inner_path in inner_paths:
- if not only_if_bad or inner_path in self.bad_files:
- pool.spawn(self.needFile, inner_path, update=True)
- else:
- num_skipped += 1
- self.worker_manager.started_task_num -= 1
- self.log.debug("Ended downloadFile pool len: %s, skipped: %s" % (len(inner_paths), num_skipped))
-
- # Update worker, try to find client that supports listModifications command
- def updater(self, peers_try, queried, since):
- threads = []
- while 1:
- if not peers_try or len(queried) >= 3: # Stop after 3 successful queries
- break
- peer = peers_try.pop(0)
- if config.verbose:
- self.log.debug("CheckModifications: Try to get updates from: %s Left: %s" % (peer, peers_try))
-
- res = None
- with gevent.Timeout(20, exception=False):
- res = peer.listModified(since)
-
- if not res or "modified_files" not in res:
- continue # Failed query
-
- queried.append(peer)
- modified_contents = []
- my_modified = self.content_manager.listModified(since)
- num_old_files = 0
- for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we do
- has_newer = int(modified) > my_modified.get(inner_path, 0)
- has_older = int(modified) < my_modified.get(inner_path, 0)
- if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified):
- if has_newer:
- # We don't have this file, or ours is older
- modified_contents.append(inner_path)
- self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1
- if has_older and num_old_files < 5:
- num_old_files += 1
- self.log.debug("CheckModifications: %s client has older version of %s, publishing there (%s/5)..." % (peer, inner_path, num_old_files))
- gevent.spawn(self.publisher, inner_path, [peer], [], 1)
- if modified_contents:
- self.log.debug("CheckModifications: %s new modified file from %s" % (len(modified_contents), peer))
- modified_contents.sort(key=lambda inner_path: 0 - res["modified_files"][inner_path]) # Download newest first
- t = gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True)
- threads.append(t)
- if config.verbose:
- self.log.debug("CheckModifications: Waiting for %s pooledDownloadContent" % len(threads))
- gevent.joinall(threads)
-
- # Check modified content.json files from peers and add modified files to bad_files
- # Return: Successfully queried peers [Peer, Peer...]
- def checkModifications(self, since=None):
- s = time.time()
- peers_try = [] # Try these peers
- queried = [] # Successfully queried from these peers
- limit = 5
-
- # Wait for peers
- if not self.peers:
- self.announce(mode="update")
- for wait in range(10):
- time.sleep(5 + wait)
- self.log.debug("CheckModifications: Waiting for peers...")
- if self.peers:
- break
-
- peers_try = self.getConnectedPeers()
- peers_connected_num = len(peers_try)
- if peers_connected_num < limit * 2: # Add more, non-connected peers if necessary
- peers_try += self.getRecentPeers(limit * 5)
-
- if since is None: # No since given: check from one day before the last modification time
- since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24
-
- if config.verbose:
- self.log.debug(
- "CheckModifications: Try to get listModifications from peers: %s, connected: %s, since: %s" %
- (peers_try, peers_connected_num, since)
- )
-
- updaters = []
- for i in range(3):
- updaters.append(gevent.spawn(self.updater, peers_try, queried, since))
-
- gevent.joinall(updaters, timeout=10) # Wait 10 sec for the workers to finish querying modifications
-
- if not queried: # First 3 updaters are stuck, feed them connected peers and keep waiting
- peers_try[0:0] = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Add connected peers
- for _ in range(10):
- gevent.joinall(updaters, timeout=10) # Wait another 10 sec if none of updaters finished
- if queried:
- break
-
- self.log.debug("CheckModifications: Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since))
- time.sleep(0.1)
- return queried
-
- # Update content.json from peers and download changed files
- # Return: None, or False if the site is not serving
- @util.Noparallel()
- def update(self, announce=False, check_files=False, since=None):
- self.content_manager.loadContent("content.json", load_includes=False) # Reload content.json
- self.content_updated = None # Reset content updated time
-
- if check_files:
- self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size
-
- if not self.isServing():
- return False
-
- self.updateWebsocket(updating=True)
-
- # Remove bad_files entries that are no longer in content.json
- self.checkBadFiles()
-
- if announce:
- self.announce(mode="update", force=True)
-
- # Full update, we can reset bad files
- if check_files and since == 0:
- self.bad_files = {}
-
- queried = self.checkModifications(since)
-
- changed, deleted = self.content_manager.loadContent("content.json", load_includes=False)
-
- if self.bad_files:
- self.log.debug("Bad files: %s" % self.bad_files)
- gevent.spawn(self.retryBadFiles, force=True)
-
- if len(queried) == 0:
- # Failed to query modifications
- self.content_updated = False
- else:
- self.content_updated = time.time()
-
- self.updateWebsocket(updated=True)
-
- # Update site by redownload all content.json
- def redownloadContents(self):
- # Download all content.json again
- content_threads = []
- for inner_path in list(self.content_manager.contents.keys()):
- content_threads.append(self.needFile(inner_path, update=True, blocking=False))
-
- self.log.debug("Waiting %s content.json to finish..." % len(content_threads))
- gevent.joinall(content_threads)
-
- # Publish worker
- def publisher(self, inner_path, peers, published, limit, diffs={}, event_done=None, cb_progress=None):
- file_size = self.storage.getSize(inner_path)
- content_json_modified = self.content_manager.contents[inner_path]["modified"]
- body = self.storage.read(inner_path)
-
- while 1:
- if not peers or len(published) >= limit:
- if event_done:
- event_done.set(True)
- break # All peers done, or published enough
- peer = peers.pop()
- if peer in published:
- continue
- if peer.last_content_json_update == content_json_modified:
- self.log.debug("%s already received this update for %s, skipping" % (peer, inner_path))
- continue
-
- if peer.connection and peer.connection.last_ping_delay: # Peer connected
- # Timeout: 5sec + size in kb + last_ping
- timeout = 5 + int(file_size / 1024) + peer.connection.last_ping_delay
- else: # Peer not connected
- # Timeout: 10sec + size in kb
- timeout = 10 + int(file_size / 1024)
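- # Worked example: a 20 KB content.json sent to a connected peer with a
- # 0.2s ping gets a timeout of 5 + 20 + 0.2, i.e. about 25 seconds.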
- result = {"exception": "Timeout"}
-
- for retry in range(2):
- try:
- with gevent.Timeout(timeout, False):
- result = peer.publish(self.address, inner_path, body, content_json_modified, diffs)
- if result:
- break
- except Exception as err:
- self.log.error("Publish error: %s" % Debug.formatException(err))
- result = {"exception": Debug.formatException(err)}
-
- if result and "ok" in result:
- published.append(peer)
- if cb_progress and len(published) <= limit:
- cb_progress(len(published), limit)
- self.log.info("[OK] %s: %s %s/%s" % (peer.key, result["ok"], len(published), limit))
- else:
- if result == {"exception": "Timeout"}:
- peer.onConnectionError("Publish timeout")
- self.log.info("[FAILED] %s: %s" % (peer.key, result))
- time.sleep(0.01)
-
- # Update content.json on peers
- @util.Noparallel()
- def publish(self, limit="default", inner_path="content.json", diffs={}, cb_progress=None):
- published = [] # Successfully published (Peer)
- publishers = [] # Publisher threads
-
- if not self.peers:
- self.announce(mode="more")
-
- if limit == "default":
- limit = 5
- threads = limit
-
- peers = self.getConnectedPeers()
- num_connected_peers = len(peers)
-
- random.shuffle(peers)
- peers = sorted(peers, key=lambda peer: peer.connection.handshake.get("rev", 0) < config.rev - 100) # Prefer newer clients (False sorts first)
-
- if len(peers) < limit * 2 and len(self.peers) > len(peers): # Add more, non-connected peers if necessary
- peers += self.getRecentPeers(limit * 2)
-
- peers = set(peers)
-
- self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % (
- inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024
- ))
-
- if not peers:
- return 0 # No peers found
-
- event_done = gevent.event.AsyncResult()
- for i in range(min(len(peers), limit, threads)):
- publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, diffs, event_done, cb_progress)
- publishers.append(publisher)
-
- event_done.get() # Wait for done
- if len(published) < min(len(self.peers), limit):
- time.sleep(0.2) # If less than we need sleep a bit
- if len(published) == 0:
- gevent.joinall(publishers) # No successful publish, wait for all publisher
-
- # Publish to more peers in the background
- self.log.info(
- "Published %s to %s peers, publishing to %s more peers in the background" %
- (inner_path, len(published), limit)
- )
-
- for thread in range(2):
- gevent.spawn(self.publisher, inner_path, peers, published, limit=limit * 2, diffs=diffs)
-
- # Send my hashfield to every connected peer if changed
- gevent.spawn(self.sendMyHashfield, 100)
-
- return len(published)
-
- # Copy this site
- @util.Noparallel()
- def clone(self, address, privatekey=None, address_index=None, root_inner_path="", overwrite=False):
- import shutil
- new_site = SiteManager.site_manager.need(address, all_file=False)
- default_dirs = [] # Don't copy these directories (they have a -default version)
- for dir_name in os.listdir(self.storage.directory):
- if "-default" in dir_name:
- default_dirs.append(dir_name.replace("-default", ""))
-
- self.log.debug("Cloning to %s, ignore dirs: %s, root: %s" % (address, default_dirs, root_inner_path))
-
- # Copy root content.json
- if not new_site.storage.isFile("content.json") and not overwrite:
- # New site: content.json doesn't exist yet, create one from the source site
- if "size_limit" in self.settings:
- new_site.settings["size_limit"] = self.settings["size_limit"]
-
- # Use content.json-default if present
- if self.storage.isFile(root_inner_path + "/content.json-default"):
- content_json = self.storage.loadJson(root_inner_path + "/content.json-default")
- else:
- content_json = self.storage.loadJson("content.json")
-
- if "domain" in content_json:
- del content_json["domain"]
- content_json["title"] = "my" + content_json["title"]
- content_json["cloned_from"] = self.address
- content_json["clone_root"] = root_inner_path
- content_json["files"] = {}
- if address_index:
- content_json["address_index"] = address_index # Site owner's BIP32 index
- new_site.storage.writeJson("content.json", content_json)
- new_site.content_manager.loadContent(
- "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False
- )
-
- # Copy files
- for content_inner_path, content in list(self.content_manager.contents.items()):
- file_relative_paths = list(content.get("files", {}).keys())
-
- # Sign content.json at the end to make sure every file is included
- file_relative_paths.sort()
- file_relative_paths.sort(key=lambda key: key.replace("-default", "").endswith("content.json"))
-
- for file_relative_path in file_relative_paths:
- file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to content.json
- file_inner_path = file_inner_path.strip("/") # Strip surrounding /
- if not file_inner_path.startswith(root_inner_path):
- self.log.debug("[SKIP] %s (not in clone root)" % file_inner_path)
- continue
- if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative
- self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path)
- continue
- file_path = self.storage.getPath(file_inner_path)
-
- # Copy the file as-is to keep the -default postfixed dirs and files, so the new site can be cloned later
- if root_inner_path:
- file_inner_path_dest = re.sub("^%s/" % re.escape(root_inner_path), "", file_inner_path)
- file_path_dest = new_site.storage.getPath(file_inner_path_dest)
- else:
- file_inner_path_dest = file_inner_path
- file_path_dest = new_site.storage.getPath(file_inner_path)
-
- self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest))
- dest_dir = os.path.dirname(file_path_dest)
- if not os.path.isdir(dest_dir):
- os.makedirs(dest_dir)
- if file_inner_path_dest.replace("-default", "") == "content.json": # Don't copy root content.json-default
- continue
-
- shutil.copy(file_path, file_path_dest)
-
- # If -default in path, create a -default less copy of the file
- if "-default" in file_inner_path_dest:
- file_path_dest = new_site.storage.getPath(file_inner_path_dest.replace("-default", ""))
- if new_site.storage.isFile(file_inner_path_dest.replace("-default", "")) and not overwrite:
- # Don't overwrite site files with default ones
- self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path)
- continue
- self.log.debug("[COPY] Default file: %s to %s..." % (file_inner_path, file_path_dest))
- dest_dir = os.path.dirname(file_path_dest)
- if not os.path.isdir(dest_dir):
- os.makedirs(dest_dir)
- shutil.copy(file_path, file_path_dest)
- # Sign if content json
- if file_path_dest.endswith("/content.json"):
- new_site.storage.onUpdated(file_inner_path_dest.replace("-default", ""))
- new_site.content_manager.loadContent(
- file_inner_path_dest.replace("-default", ""), add_bad_files=False,
- delete_removed_files=False, load_includes=False
- )
- if privatekey:
- new_site.content_manager.sign(file_inner_path_dest.replace("-default", ""), privatekey, remove_missing_optional=True)
- new_site.content_manager.loadContent(
- file_inner_path_dest, add_bad_files=False, delete_removed_files=False, load_includes=False
- )
-
- if privatekey:
- new_site.content_manager.sign("content.json", privatekey, remove_missing_optional=True)
- new_site.content_manager.loadContent(
- "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False
- )
-
- # Rebuild DB
- if new_site.storage.isFile("dbschema.json"):
- new_site.storage.closeDb()
- try:
- new_site.storage.rebuildDb()
- except Exception as err:
- self.log.error(err)
-
- return new_site
-
- @util.Pooled(100)
- def pooledNeedFile(self, *args, **kwargs):
- return self.needFile(*args, **kwargs)
-
- def isFileDownloadAllowed(self, inner_path, file_info):
- # Verify space for the whole site
- if self.settings["size"] > self.getSizeLimit() * 1024 * 1024:
- return False
- # Verify space for file
- if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024:
- self.log.debug(
- "File size %s too large: %sMB > %sMB, skipping..." %
- (inner_path, file_info.get("size", 0) / 1024 / 1024, config.file_size_limit)
- )
- return False
- else:
- return True
-
- def needFileInfo(self, inner_path):
- file_info = self.content_manager.getFileInfo(inner_path)
- if not file_info:
- # No info for file, download all content.json first
- self.log.debug("No info for %s, waiting for all content.json" % inner_path)
- success = self.downloadContent("content.json", download_files=False)
- if not success:
- return False
- file_info = self.content_manager.getFileInfo(inner_path)
- return file_info
-
- # Check the file, and download it if it doesn't exist
- def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0):
- if self.worker_manager.tasks.findTask(inner_path):
- task = self.worker_manager.addTask(inner_path, peer, priority=priority)
- if blocking:
- return task["evt"].get()
- else:
- return task["evt"]
- elif self.storage.isFile(inner_path) and not update: # File exists, no need to do anything
- return True
- elif not self.isServing(): # Site not serving
- return False
- else: # Wait until file downloaded
- if not self.content_manager.contents.get("content.json"): # No content.json, download it first!
- self.log.debug("Need content.json first (inner_path: %s, priority: %s)" % (inner_path, priority))
- if priority > 0:
- gevent.spawn(self.announce)
- if inner_path != "content.json": # Prevent double download
- task = self.worker_manager.addTask("content.json", peer)
- task["evt"].get()
- self.content_manager.loadContent()
- if not self.content_manager.contents.get("content.json"):
- return False # Content.json download failed
-
- file_info = None
- if not inner_path.endswith("content.json"):
- file_info = self.needFileInfo(inner_path)
- if not file_info:
- return False
- if "cert_signers" in file_info and not file_info["content_inner_path"] in self.content_manager.contents:
- self.log.debug("Missing content.json for requested user file: %s" % inner_path)
- if self.bad_files.get(file_info["content_inner_path"], 0) > 5:
- self.log.debug("File %s not reachable: retry %s" % (
- inner_path, self.bad_files.get(file_info["content_inner_path"], 0)
- ))
- return False
- self.downloadContent(file_info["content_inner_path"])
-
- if not self.isFileDownloadAllowed(inner_path, file_info):
- self.log.debug("%s: Download not allowed" % inner_path)
- return False
-
- self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file
-
- task = self.worker_manager.addTask(inner_path, peer, priority=priority, file_info=file_info)
- if blocking:
- return task["evt"].get()
- else:
- return task["evt"]
-
- # Add a peer to the site, or update it if already known
- # return_peer: Always return the peer even if it was already present
- def addPeer(self, ip, port, return_peer=False, connection=None, source="other"):
- if not ip or ip == "0.0.0.0":
- return False
-
- key = "%s:%s" % (ip, port)
- peer = self.peers.get(key)
- if peer: # Already has this ip
- peer.found(source)
- if return_peer: # Always return peer
- return peer
- else:
- return False
- else: # New peer
- if (ip, port) in self.peer_blacklist:
- return False # Ignore blacklisted peers (e.g. myself)
- peer = Peer(ip, port, self)
- self.peers[key] = peer
- peer.found(source)
- return peer
-
- def announce(self, *args, **kwargs):
- if self.isServing():
- self.announcer.announce(*args, **kwargs)
-
- # Keep connections to get the updates
- def needConnections(self, num=None, check_site_on_reconnect=False):
- if num is None:
- if len(self.peers) < 50:
- num = 3
- else:
- num = 6
- need = min(len(self.peers), num, config.connected_limit) # Capped by total peers and the connected limit
-
- connected = len(self.getConnectedPeers())
-
- connected_before = connected
-
- self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers)))
-
- if connected < need: # Need more than we have
- for peer in self.getRecentPeers(30):
- if not peer.connection or not peer.connection.connected: # No peer connection or disconnected
- peer.pex() # Initiate peer exchange
- if peer.connection and peer.connection.connected:
- connected += 1 # Successfully connected
- if connected >= need:
- break
- self.log.debug(
- "Connected before: %s, after: %s. Check site: %s." %
- (connected_before, connected, check_site_on_reconnect)
- )
-
- if check_site_on_reconnect and connected_before == 0 and connected > 0 and self.connection_server.has_internet:
- gevent.spawn(self.update, check_files=False)
-
- return connected
-
- # Return: Peers recently verified as probably connectable
- def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True):
- peers = list(self.peers.values())
- found = []
- for peer in peers:
- if peer.key.endswith(":0"):
- continue # Not connectable
- if not peer.connection:
- continue # No connection
- if peer.ip.endswith(".onion") and not self.connection_server.tor_manager.enabled:
- continue # Onion not supported
- if peer.key in ignore:
- continue # The requester has this peer
- if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago
- peer.connection = None # Cleanup: Dead connection
- continue
- if not allow_private and helper.isPrivateIp(peer.ip):
- continue
- found.append(peer)
- if len(found) >= need_num:
- break # Found requested number of peers
-
- if len(found) < need_num: # Return not that good peers
- found += [
- peer for peer in peers
- if not peer.key.endswith(":0") and
- peer.key not in ignore and
- (allow_private or not helper.isPrivateIp(peer.ip))
- ][0:need_num - len(found)]
-
- return found
-
- # Return: Recently found peers
- def getRecentPeers(self, need_num):
- found = list(set(self.peers_recent))
- self.log.debug(
- "Recent peers %s of %s (need: %s)" %
- (len(found), len(self.peers), need_num)
- )
-
- if len(found) >= need_num or len(found) >= len(self.peers):
- return sorted(
- found,
- key=lambda peer: peer.reputation,
- reverse=True
- )[0:need_num]
-
- # Add random peers
- need_more = need_num - len(found)
- if not self.connection_server.tor_manager.enabled:
- peers = [peer for peer in self.peers.values() if not peer.ip.endswith(".onion")]
- else:
- peers = list(self.peers.values())
-
- found_more = sorted(
- peers[0:need_more * 50],
- key=lambda peer: peer.reputation,
- reverse=True
- )[0:need_more * 2]
-
- found += found_more
-
- return found[0:need_num]
-
- def getConnectedPeers(self):
- back = []
- if not self.connection_server:
- return []
-
- tor_manager = self.connection_server.tor_manager
- for connection in self.connection_server.connections:
- if not connection.connected and time.time() - connection.start_time > 20: # Still not connected after 20s
- continue
- peer = self.peers.get("%s:%s" % (connection.ip, connection.port))
- if peer:
- if connection.ip.endswith(".onion") and connection.target_onion and tor_manager.start_onions:
- # Check if the connection is made with the onion address created for the site
- valid_target_onions = (tor_manager.getOnion(self.address), tor_manager.getOnion("global"))
- if connection.target_onion not in valid_target_onions:
- continue
- if not peer.connection:
- peer.connect(connection)
- back.append(peer)
- return back
-
- # Clean up probably-dead peers and close connections if over the limit
- def cleanupPeers(self, peers_protected=[]):
- peers = list(self.peers.values())
- if len(peers) > 20:
- # Cleanup old peers
- removed = 0
- if len(peers) > 1000:
- ttl = 60 * 60 * 1
- else:
- ttl = 60 * 60 * 4
-
- for peer in peers:
- if peer.connection and peer.connection.connected:
- continue
- if peer.connection and not peer.connection.connected:
- peer.connection = None # Dead connection
- if time.time() - peer.time_found > ttl: # Not seen via tracker or pex within the TTL (1 or 4 hours)
- peer.remove("Time found expired")
- removed += 1
- if removed > len(peers) * 0.1: # Don't remove too much at once
- break
-
- if removed:
- self.log.debug("Cleanup peers result: Removed %s, left: %s" % (removed, len(self.peers)))
-
- # Close peers over the limit
- closed = 0
- connected_peers = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Only fully connected peers
- need_to_close = len(connected_peers) - config.connected_limit
-
- if closed < need_to_close:
- # Try to keep connections with more sites
- for peer in sorted(connected_peers, key=lambda peer: min(peer.connection.sites, 5)):
- if not peer.connection:
- continue
- if peer.key in peers_protected:
- continue
- if peer.connection.sites > 5:
- break
- peer.connection.close("Cleanup peers")
- peer.connection = None
- closed += 1
- if closed >= need_to_close:
- break
-
- if need_to_close > 0:
- self.log.debug("Connected: %s, Need to close: %s, Closed: %s" % (len(connected_peers), need_to_close, closed))
-
- # Send hashfield to peers
- def sendMyHashfield(self, limit=5):
- if not self.content_manager.hashfield: # No optional files
- return False
-
- sent = 0
- connected_peers = self.getConnectedPeers()
- for peer in connected_peers:
- if peer.sendMyHashfield():
- sent += 1
- if sent >= limit:
- break
- if sent:
- my_hashfield_changed = self.content_manager.hashfield.time_changed
- self.log.debug("Sent my hashfield (chaged %.3fs ago) to %s peers" % (time.time() - my_hashfield_changed, sent))
- return sent
-
- # Update hashfield
- def updateHashfield(self, limit=5):
- # Return if no optional files
- if not self.content_manager.hashfield and not self.content_manager.has_optional_files:
- return False
-
- s = time.time()
- queried = 0
- connected_peers = self.getConnectedPeers()
- for peer in connected_peers:
- if peer.time_hashfield:
- continue
- if peer.updateHashfield():
- queried += 1
- if queried >= limit:
- break
- if queried:
- self.log.debug("Queried hashfield from %s peers in %.3fs" % (queried, time.time() - s))
- return queried
-
- # Return whether the optional file needs to be downloaded
- def isDownloadable(self, inner_path):
- return self.settings.get("autodownloadoptional")
-
- def delete(self):
- self.log.info("Deleting site...")
- s = time.time()
- self.settings["serving"] = False
- self.settings["deleting"] = True
- self.saveSettings()
- num_greenlets = self.greenlet_manager.stopGreenlets("Site %s deleted" % self.address)
- self.worker_manager.running = False
- num_workers = self.worker_manager.stopWorkers()
- SiteManager.site_manager.delete(self.address)
- self.content_manager.contents.db.deleteSite(self)
- self.updateWebsocket(deleted=True)
- self.storage.deleteFiles()
- self.log.info(
- "Deleted site in %.3fs (greenlets: %s, workers: %s)" %
- (time.time() - s, num_greenlets, num_workers)
- )
-
- # - Events -
-
- # Add event listeners
- def addEventListeners(self):
- self.onFileStart = util.Event() # If WorkerManager added new task
- self.onFileDone = util.Event() # If WorkerManager successfully downloaded a file
- self.onFileFail = util.Event() # If WorkerManager failed to download a file
- self.onComplete = util.Event() # All files finished
-
- self.onFileStart.append(lambda inner_path: self.fileStarted()) # No parameters, so Noparallel batching works
- self.onFileDone.append(lambda inner_path: self.fileDone(inner_path))
- self.onFileFail.append(lambda inner_path: self.fileFailed(inner_path))
-
- # Send site status update to websocket clients
- def updateWebsocket(self, **kwargs):
- if kwargs:
- param = {"event": list(kwargs.items())[0]}
- else:
- param = None
- for ws in self.websockets:
- ws.event("siteChanged", self, param)
-
- def messageWebsocket(self, message, type="info", progress=None):
- for ws in self.websockets:
- if progress is None:
- ws.cmd("notification", [type, message])
- else:
- ws.cmd("progress", [type, message, progress])
-
- # File download started
- @util.Noparallel(blocking=False)
- def fileStarted(self):
- time.sleep(0.001) # Wait for other file tasks to be added
- self.updateWebsocket(file_started=True)
-
- # File downloaded successfully
- def fileDone(self, inner_path):
- # File downloaded, remove it from bad files
- if inner_path in self.bad_files:
- if config.verbose:
- self.log.debug("Bad file solved: %s" % inner_path)
- del self.bad_files[inner_path]
-
- # Update content.json last download time
- if inner_path == "content.json":
- if not self.settings.get("downloaded"):
- self.settings["downloaded"] = int(time.time())
- self.content_updated = time.time()
-
- self.updateWebsocket(file_done=inner_path)
-
- # File download failed
- def fileFailed(self, inner_path):
- if inner_path == "content.json":
- self.content_updated = False
- self.log.debug("Can't update content.json")
- if inner_path in self.bad_files and self.connection_server.has_internet:
- self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1
-
- self.updateWebsocket(file_failed=inner_path)
-
- if self.bad_files.get(inner_path, 0) > 30:
- self.fileForgot(inner_path)
-
- def fileForgot(self, inner_path):
- self.log.debug("Giving up on %s" % inner_path)
- del self.bad_files[inner_path] # Give up after 30 tries
diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py
deleted file mode 100644
index 2fd63e82..00000000
--- a/src/Site/SiteAnnouncer.py
+++ /dev/null
@@ -1,293 +0,0 @@
-import random
-import time
-import hashlib
-import re
-import collections
-
-import gevent
-
-from Plugin import PluginManager
-from Config import config
-from Debug import Debug
-from util import helper
-from greenlet import GreenletExit
-import util
-
-
-class AnnounceError(Exception):
- pass
-
-global_stats = collections.defaultdict(lambda: collections.defaultdict(int))
-
-
-@PluginManager.acceptPlugins
-class SiteAnnouncer(object):
- def __init__(self, site):
- self.site = site
- self.stats = {}
- self.fileserver_port = config.fileserver_port
- self.peer_id = self.site.connection_server.peer_id
- self.last_tracker_id = random.randint(0, 10)
- self.time_last_announce = 0
-
- def getTrackers(self):
- return config.trackers
-
- def getSupportedTrackers(self):
- trackers = self.getTrackers()
-
- if not self.site.connection_server.tor_manager.enabled:
- trackers = [tracker for tracker in trackers if ".onion" not in tracker]
-
- trackers = [tracker for tracker in trackers if self.getAddressParts(tracker)] # Remove trackers with unknown address
-
- if "ipv6" not in self.site.connection_server.supported_ip_types:
- trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"]
-
- return trackers
-
- def getAnnouncingTrackers(self, mode):
- trackers = self.getSupportedTrackers()
-
- if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker id
- self.last_tracker_id += 1
- self.last_tracker_id = self.last_tracker_id % len(trackers)
- trackers_announcing = [trackers[self.last_tracker_id]] # We are only going to use this one
- else:
- trackers_announcing = trackers
-
- return trackers_announcing
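-
- # Illustrative example: with three supported trackers, consecutive "update"
- # or "more" announces rotate through them one at a time, while other modes
- # (e.g. "start") announce to all of them at once.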
-
- def getOpenedServiceTypes(self):
- back = []
- # Address types on which peers can reach me
- if config.trackers_proxy == "disable" and config.tor != "always":
- for ip_type, opened in list(self.site.connection_server.port_opened.items()):
- if opened:
- back.append(ip_type)
- if self.site.connection_server.tor_manager.start_onions:
- back.append("onion")
- return back
-
- @util.Noparallel(blocking=False)
- def announce(self, force=False, mode="start", pex=True):
- if time.time() - self.time_last_announce < 30 and not force:
- return # No reannouncing within 30 secs
- if force:
- self.site.log.debug("Force reannounce in mode %s" % mode)
-
- self.fileserver_port = config.fileserver_port
- self.time_last_announce = time.time()
-
- trackers = self.getAnnouncingTrackers(mode)
-
- if config.verbose:
- self.site.log.debug("Tracker announcing, trackers: %s" % trackers)
-
- errors = []
- slow = []
- s = time.time()
- threads = []
- num_announced = 0
-
- for tracker in trackers: # Start announce threads
- tracker_stats = global_stats[tracker]
- # Back off from trackers that look unreliable: skip them for up to 30 minutes, scaled by error count
- time_announce_allowed = time.time() - 60 * min(30, tracker_stats["num_error"])
- if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time_announce_allowed and not force:
- if config.verbose:
- self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"]))
- continue
- thread = self.site.greenlet_manager.spawn(self.announceTracker, tracker, mode=mode)
- threads.append(thread)
- thread.tracker = tracker
-
- time.sleep(0.01)
- self.updateWebsocket(trackers="announcing")
-
- gevent.joinall(threads, timeout=20) # Wait for announce finish
-
- for thread in threads:
- if thread.value is None:
- continue
- if thread.value is not False:
- if thread.value > 1.0: # Takes more than 1 second to announce
- slow.append("%.2fs %s" % (thread.value, thread.tracker))
- num_announced += 1
- else:
- if thread.ready():
- errors.append(thread.tracker)
- else: # Still running
- slow.append("30s+ %s" % thread.tracker)
-
- # Save peers num
- self.site.settings["peers"] = len(self.site.peers)
-
- if len(errors) < len(threads): # At least one tracker finished
- if len(trackers) == 1:
- announced_to = trackers[0]
- else:
- announced_to = "%s/%s trackers" % (num_announced, len(threads))
- if mode != "update" or config.verbose:
- self.site.log.debug(
- "Announced in mode %s to %s in %.3fs, errors: %s, slow: %s" %
- (mode, announced_to, time.time() - s, errors, slow)
- )
- else:
- if len(threads) > 1:
- self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s))
- if len(threads) == 1 and mode != "start": # Move to next tracker
- self.site.log.debug("Tracker failed, skipping to next one...")
- self.site.greenlet_manager.spawnLater(1.0, self.announce, force=force, mode=mode, pex=pex)
-
- self.updateWebsocket(trackers="announced")
-
- if pex:
- self.updateWebsocket(pex="announcing")
- if mode == "more": # Need more peers
- self.announcePex(need_num=10)
- else:
- self.announcePex()
-
- self.updateWebsocket(pex="announced")
-
- def getTrackerHandler(self, protocol):
- return None
-
- def getAddressParts(self, tracker):
- if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker):
- return None
- protocol, address = tracker.split("://", 1)
- if ":" in address:
- ip, port = address.rsplit(":", 1)
- else:
- ip = address
- if protocol.startswith("https"):
- port = 443
- else:
- port = 80
- back = {}
- back["protocol"] = protocol
- back["address"] = address
- back["ip"] = ip
- back["port"] = port
- return back
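-
- # Illustrative example (hypothetical tracker): getAddressParts("udp://tracker.example.org:6969")
- # returns {"protocol": "udp", "address": "tracker.example.org:6969",
- # "ip": "tracker.example.org", "port": "6969"}. Note the port stays a string
- # when parsed from the address and is only an int when defaulted to 443/80.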
-
- def announceTracker(self, tracker, mode="start", num_want=10):
- s = time.time()
- address_parts = self.getAddressParts(tracker)
- if not address_parts:
- self.site.log.warning("Tracker %s error: Invalid address" % tracker)
- return False
-
- if tracker not in self.stats:
- self.stats[tracker] = {"status": "", "num_request": 0, "num_success": 0, "num_error": 0, "time_request": 0, "time_last_error": 0}
-
- last_status = self.stats[tracker]["status"]
- self.stats[tracker]["status"] = "announcing"
- self.stats[tracker]["time_request"] = time.time()
- global_stats[tracker]["time_request"] = time.time()
- if config.verbose:
- self.site.log.debug("Tracker announcing to %s (mode: %s)" % (tracker, mode))
- if mode == "update":
- num_want = 10
- else:
- num_want = 30
-
- handler = self.getTrackerHandler(address_parts["protocol"])
- error = None
- try:
- if handler:
- peers = handler(address_parts["address"], mode=mode, num_want=num_want)
- else:
- raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"])
- except Exception as err:
- self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode))
- error = err
-
- if error:
- self.stats[tracker]["status"] = "error"
- self.stats[tracker]["time_status"] = time.time()
- self.stats[tracker]["last_error"] = str(error)
- self.stats[tracker]["time_last_error"] = time.time()
- if self.site.connection_server.has_internet:
- self.stats[tracker]["num_error"] += 1
- self.stats[tracker]["num_request"] += 1
- global_stats[tracker]["num_request"] += 1
- if self.site.connection_server.has_internet:
- global_stats[tracker]["num_error"] += 1
- self.updateWebsocket(tracker="error")
- return False
-
- if peers is None: # Announce skipped
- self.stats[tracker]["time_status"] = time.time()
- self.stats[tracker]["status"] = last_status
- return None
-
- self.stats[tracker]["status"] = "announced"
- self.stats[tracker]["time_status"] = time.time()
- self.stats[tracker]["num_success"] += 1
- self.stats[tracker]["num_request"] += 1
- global_stats[tracker]["num_request"] += 1
- global_stats[tracker]["num_error"] = 0
-
- if peers is True: # Announce success, but no peers returned
- return time.time() - s
-
- # Adding peers
- added = 0
- for peer in peers:
- if peer["port"] == 1: # Some trackers does not accept port 0, so we send port 1 as not-connectable
- peer["port"] = 0
- if not peer["port"]:
- continue # Dont add peers with port 0
- if self.site.addPeer(peer["addr"], peer["port"], source="tracker"):
- added += 1
-
- if added:
- self.site.worker_manager.onPeers()
- self.site.updateWebsocket(peers_added=added)
-
- if config.verbose:
- self.site.log.debug(
- "Tracker result: %s://%s (found %s peers, new: %s, total: %s)" %
- (address_parts["protocol"], address_parts["address"], len(peers), added, len(self.site.peers))
- )
- return time.time() - s
-
- @util.Noparallel(blocking=False)
- def announcePex(self, query_num=2, need_num=5):
- peers = self.site.getConnectedPeers()
- if len(peers) == 0: # Wait 3s for connections
- time.sleep(3)
- peers = self.site.getConnectedPeers()
-
- if len(peers) == 0: # No connected peers for this site, fall back to recently seen ones
- peers = list(self.site.getRecentPeers(20))
- need_num = 10
-
- random.shuffle(peers)
- done = 0
- total_added = 0
- for peer in peers:
- num_added = peer.pex(need_num=need_num)
- if num_added is not False:
- done += 1
- total_added += num_added
- if num_added:
- self.site.worker_manager.onPeers()
- self.site.updateWebsocket(peers_added=num_added)
- else:
- time.sleep(0.1)
- if done == query_num:
- break
- self.site.log.debug("Pex result: from %s peers got %s new peers." % (done, total_added))
-
- def updateWebsocket(self, **kwargs):
- if kwargs:
- param = {"event": list(kwargs.items())[0]}
- else:
- param = None
-
- for ws in self.site.websockets:
- ws.event("announcerChanged", self.site, param)
diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py
deleted file mode 100644
index 684d69fc..00000000
--- a/src/Site/SiteManager.py
+++ /dev/null
@@ -1,226 +0,0 @@
-import json
-import logging
-import re
-import os
-import time
-import atexit
-
-import gevent
-
-import util
-from Plugin import PluginManager
-from Content import ContentDb
-from Config import config
-from util import helper
-from util import RateLimit
-from util import Cached
-
-
-@PluginManager.acceptPlugins
-class SiteManager(object):
- def __init__(self):
- self.log = logging.getLogger("SiteManager")
- self.log.debug("SiteManager created.")
- self.sites = {}
- self.sites_changed = int(time.time())
- self.loaded = False
- gevent.spawn(self.saveTimer)
- atexit.register(lambda: self.save(recalculate_size=True))
-
- # Load all sites from data/sites.json
- @util.Noparallel()
- def load(self, cleanup=True, startup=False):
- from Debug import Debug
- self.log.info("Loading sites... (cleanup: %s, startup: %s)" % (cleanup, startup))
- self.loaded = False
- from .Site import Site
- address_found = []
- added = 0
- load_s = time.time()
- # Load new addresses
- try:
- json_path = "%s/sites.json" % config.data_dir
- data = json.load(open(json_path))
- except Exception as err:
- raise Exception("Unable to load %s: %s" % (json_path, err))
-
- sites_need = []
-
- for address, settings in data.items():
- if address not in self.sites:
- if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
- # Root content.json exists, try load site
- s = time.time()
- try:
- site = Site(address, settings=settings)
- site.content_manager.contents.get("content.json")
- except Exception as err:
- self.log.debug("Error loading site %s: %s" % (address, err))
- continue
- self.sites[address] = site
- self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s))
- added += 1
- elif startup:
- # No site directory, start download
- self.log.debug("Found new site in sites.json: %s" % address)
- sites_need.append([address, settings])
- added += 1
-
- address_found.append(address)
-
- # Remove deleted addresses
- if cleanup:
- for address in list(self.sites.keys()):
- if address not in address_found:
- del(self.sites[address])
- self.log.debug("Removed site: %s" % address)
-
- # Remove orphan sites from content.db
- content_db = ContentDb.getContentDb()
- for row in content_db.execute("SELECT * FROM site").fetchall():
- address = row["address"]
- if address not in self.sites and address not in address_found:
- self.log.info("Deleting orphan site from content.db: %s" % address)
-
- try:
- content_db.execute("DELETE FROM site WHERE ?", {"address": address})
- except Exception as err:
- self.log.error("Can't delete site %s from content_db: %s" % (address, err))
-
- if address in content_db.site_ids:
- del content_db.site_ids[address]
- if address in content_db.sites:
- del content_db.sites[address]
-
- self.loaded = True
- for address, settings in sites_need:
- gevent.spawn(self.need, address, settings=settings)
- if added:
- self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s))
-
- def saveDelayed(self):
- RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save)
-
- def save(self, recalculate_size=False):
- if not self.sites:
- self.log.debug("Save skipped: No sites found")
- return
- if not self.loaded:
- self.log.debug("Save skipped: Not loaded")
- return
- s = time.time()
- data = {}
- # Generate data file
- s = time.time()
- for address, site in list(self.list().items()):
- if recalculate_size:
- site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size
- data[address] = site.settings
- data[address]["cache"] = site.getSettingsCache()
- time_generate = time.time() - s
-
- s = time.time()
- if data:
- helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8"))
- else:
- self.log.debug("Save error: No data")
- time_write = time.time() - s
-
- # Remove cache from site settings
- for address, site in self.list().items():
- site.settings["cache"] = {}
-
- self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write))
-
- def saveTimer(self):
- while 1:
- time.sleep(60 * 10)
- self.save(recalculate_size=True)
-
- # Checks if it's a valid address
- def isAddress(self, address):
- return re.match("^[A-Za-z0-9]{26,35}$", address)
-
- def isDomain(self, address):
- return False
-
- @Cached(timeout=10)
- def isDomainCached(self, address):
- return self.isDomain(address)
-
- def resolveDomain(self, domain):
- return False
-
- @Cached(timeout=10)
- def resolveDomainCached(self, domain):
- return self.resolveDomain(domain)
-
- # Return: Site object or None if not found
- def get(self, address):
- if self.isDomainCached(address):
- address_resolved = self.resolveDomainCached(address)
- if address_resolved:
- address = address_resolved
-
- if not self.loaded: # Not loaded yet
- self.log.debug("Loading site: %s)..." % address)
- self.load()
- site = self.sites.get(address)
-
- return site
-
- def add(self, address, all_file=True, settings=None, **kwargs):
- from .Site import Site
- self.sites_changed = int(time.time())
- # Try to find site with different case
- for recover_address, recover_site in list(self.sites.items()):
- if recover_address.lower() == address.lower():
- return recover_site
-
- if not self.isAddress(address):
- return False # Not a valid address
- self.log.debug("Added new site: %s" % address)
- config.loadTrackersFile()
- site = Site(address, settings=settings)
- self.sites[address] = site
- if not site.settings["serving"]: # Maybe it was deleted before
- site.settings["serving"] = True
- site.saveSettings()
- if all_file: # Also download user files on first sync
- site.download(check_size=True, blind_includes=True)
- return site
-
- # Return or create site and start download site files
- def need(self, address, *args, **kwargs):
- if self.isDomainCached(address):
- address_resolved = self.resolveDomainCached(address)
- if address_resolved:
- address = address_resolved
-
- site = self.get(address)
- if not site: # Site does not exist yet
- site = self.add(address, *args, **kwargs)
- return site
-
- def delete(self, address):
- self.sites_changed = int(time.time())
- self.log.debug("Deleted site: %s" % address)
- del(self.sites[address])
- # Delete from sites.json
- self.save()
-
- # Lazy load sites
- def list(self):
- if not self.loaded: # Not loaded yet
- self.log.debug("Sites not loaded yet...")
- self.load(startup=True)
- return self.sites
-
-
-site_manager = SiteManager() # Singleton
-
-if config.action == "main": # Don't connect / add myself to peerlist
- peer_blacklist = [("127.0.0.1", config.fileserver_port), ("::1", config.fileserver_port)]
-else:
- peer_blacklist = []
-
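
SiteManager persists every site's settings into a single data/sites.json and writes it via helper.atomicWrite(), so a crash mid-save cannot truncate the live file. A hedged sketch of that write-temp-then-rename pattern using only the standard library (atomic_write_json() is illustrative, not ZeroNet's helper):

import json
import os
import tempfile

def atomic_write_json(path, data):
    # Write to a temp file in the same directory, then rename over the target:
    # the rename is atomic, so readers never observe a half-written file.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    with os.fdopen(fd, "w") as f:
        json.dump(data, f, indent=1, sort_keys=True)
    os.replace(tmp_path, path)

sites = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": {"serving": True, "size": 1234}}
atomic_write_json("sites.json", sites)
with open("sites.json") as f:
    assert json.load(f).keys() == sites.keys()
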
diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py
deleted file mode 100644
index c12a80b0..00000000
--- a/src/Site/SiteStorage.py
+++ /dev/null
@@ -1,631 +0,0 @@
-import os
-import re
-import shutil
-import json
-import time
-import errno
-from collections import defaultdict
-
-import sqlite3
-import gevent.event
-
-import util
-from util import SafeRe
-from Db.Db import Db
-from Debug import Debug
-from Config import config
-from util import helper
-from util import ThreadPool
-from Plugin import PluginManager
-from Translate import translate as _
-
-
-thread_pool_fs_read = ThreadPool.ThreadPool(config.threads_fs_read, name="FS read")
-thread_pool_fs_write = ThreadPool.ThreadPool(config.threads_fs_write, name="FS write")
-thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch")
-
-
-@PluginManager.acceptPlugins
-class SiteStorage(object):
- def __init__(self, site, allow_create=True):
- self.site = site
- self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
- self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir
- self.log = site.log
- self.db = None # Db class
- self.db_checked = False # Checked db tables since startup
- self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
- self.has_db = self.isFile("dbschema.json") # The site has schema
-
- if not os.path.isdir(self.directory):
- if allow_create:
- os.mkdir(self.directory) # Create directory if not found
- else:
- raise Exception("Directory not exists: %s" % self.directory)
-
- def getDbFile(self):
- if self.db:
- return self.db.schema["db_file"]
- else:
- if self.isFile("dbschema.json"):
- schema = self.loadJson("dbschema.json")
- return schema["db_file"]
- else:
- return False
-
- # Create new database object with the site's schema
- def openDb(self, close_idle=False):
- schema = self.getDbSchema()
- db_path = self.getPath(schema["db_file"])
- return Db(schema, db_path, close_idle=close_idle)
-
- def closeDb(self, reason="Unknown (SiteStorage)"):
- if self.db:
- self.db.close(reason)
- self.event_db_busy = None
- self.db = None
-
- def getDbSchema(self):
- try:
- self.site.needFile("dbschema.json")
- schema = self.loadJson("dbschema.json")
- except Exception as err:
- raise Exception("dbschema.json is not a valid JSON: %s" % err)
- return schema
-
- def loadDb(self):
- self.log.debug("No database, waiting for dbschema.json...")
- self.site.needFile("dbschema.json", priority=3)
- self.log.debug("Got dbschema.json")
- self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
- if self.has_db:
- schema = self.getDbSchema()
- db_path = self.getPath(schema["db_file"])
- if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:
- try:
- self.rebuildDb(reason="Missing database")
- except Exception as err:
- self.log.error(err)
- pass
-
- if self.db:
- self.db.close("Gettig new db for SiteStorage")
- self.db = self.openDb(close_idle=True)
- try:
- changed_tables = self.db.checkTables()
- if changed_tables:
- self.rebuildDb(delete_db=False, reason="Changed tables") # TODO: only update the changed table datas
- except sqlite3.OperationalError:
- pass
-
- # Return db class
- @util.Noparallel()
- def getDb(self):
- if self.event_db_busy: # Db not ready for queries
- self.log.debug("Wating for db...")
- self.event_db_busy.get() # Wait for event
- if not self.db:
- self.loadDb()
- return self.db
-
- def updateDbFile(self, inner_path, file=None, cur=None):
- path = self.getPath(inner_path)
- if cur:
- db = cur.db
- else:
- db = self.getDb()
- return db.updateJson(path, file, cur)
-
- # Return possible db files for the site
- @thread_pool_fs_read.wrap
- def getDbFiles(self):
- found = 0
- for content_inner_path, content in self.site.content_manager.contents.items():
- # content.json file itself
- if self.isFile(content_inner_path):
- yield content_inner_path, self.getPath(content_inner_path)
- else:
- self.log.debug("[MISSING] %s" % content_inner_path)
- # Data files in content.json
- content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
- for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
- if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
- continue # We are only interested in json files
- file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- if self.isFile(file_inner_path):
- yield file_inner_path, self.getPath(file_inner_path)
- else:
- self.log.debug("[MISSING] %s" % file_inner_path)
- found += 1
- if found % 100 == 0:
- time.sleep(0.001) # Context switch to avoid UI block
-
- # Rebuild sql cache
- @util.Noparallel()
- @thread_pool_fs_batch.wrap
- def rebuildDb(self, delete_db=True, reason="Unknown"):
- self.log.info("Rebuilding db (reason: %s)..." % reason)
- self.has_db = self.isFile("dbschema.json")
- if not self.has_db:
- return False
-
- schema = self.loadJson("dbschema.json")
- db_path = self.getPath(schema["db_file"])
- if os.path.isfile(db_path) and delete_db:
- if self.db:
- self.closeDb("rebuilding") # Close db if open
- time.sleep(0.5)
- self.log.info("Deleting %s" % db_path)
- try:
- os.unlink(db_path)
- except Exception as err:
- self.log.error("Delete error: %s" % err)
-
- if not self.db:
- self.db = self.openDb()
- self.event_db_busy = gevent.event.AsyncResult()
-
- self.log.info("Rebuild: Creating tables...")
-
- # raise DbTableError if not valid
- self.db.checkTables()
-
- cur = self.db.getCursor()
- cur.logging = False
- s = time.time()
- self.log.info("Rebuild: Getting db files...")
- db_files = list(self.getDbFiles())
- num_imported = 0
- num_total = len(db_files)
- num_error = 0
-
- self.log.info("Rebuild: Importing data...")
- try:
- if num_total > 100:
- self.site.messageWebsocket(
- _["Database rebuilding... Imported {0} of {1} files (error: {2})..."].format(
- "0000", num_total, num_error
- ), "rebuild", 0
- )
- for file_inner_path, file_path in db_files:
- try:
- if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur):
- num_imported += 1
- except Exception as err:
- self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err)))
- num_error += 1
-
- if num_imported and num_imported % 100 == 0:
- self.site.messageWebsocket(
- _["Database rebuilding... Imported {0} of {1} files (error: {2})..."].format(
- num_imported, num_total, num_error
- ),
- "rebuild", int(float(num_imported) / num_total * 100)
- )
- time.sleep(0.001) # Context switch to avoid UI block
-
- finally:
- cur.close()
- if num_total > 100:
- self.site.messageWebsocket(
- _["Database rebuilding... Imported {0} of {1} files (error: {2})..."].format(
- num_imported, num_total, num_error
- ), "rebuild", 100
- )
- self.log.info("Rebuild: Imported %s data file in %.3fs" % (num_imported, time.time() - s))
- self.event_db_busy.set(True) # Event done, notify waiters
- self.event_db_busy = None # Clear event
- self.db.commit("Rebuilt")
-
- return True
-
- # Execute sql query or rebuild on dberror
- def query(self, query, params=None):
- if not query.strip().upper().startswith("SELECT"):
- raise Exception("Only SELECT query supported")
-
- try:
- res = self.getDb().execute(query, params)
- except sqlite3.DatabaseError as err:
- if err.__class__.__name__ == "DatabaseError":
- self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
- try:
- self.rebuildDb(reason="Query error")
- except sqlite3.OperationalError:
- pass
- res = self.db.cur.execute(query, params)
- else:
- raise err
- return res
-
- def ensureDir(self, inner_path):
- try:
- os.makedirs(self.getPath(inner_path))
- except OSError as err:
- if err.errno == errno.EEXIST:
- return False
- else:
- raise err
- return True
-
- # Open file object
- def open(self, inner_path, mode="rb", create_dirs=False, **kwargs):
- file_path = self.getPath(inner_path)
- if create_dirs:
- file_inner_dir = os.path.dirname(inner_path)
- self.ensureDir(file_inner_dir)
- return open(file_path, mode, **kwargs)
-
- # Open file object
- @thread_pool_fs_read.wrap
- def read(self, inner_path, mode="rb"):
- return open(self.getPath(inner_path), mode).read()
-
- @thread_pool_fs_write.wrap
- def writeThread(self, inner_path, content):
- file_path = self.getPath(inner_path)
- # Create dir if not exist
- self.ensureDir(os.path.dirname(inner_path))
- # Write file
- if hasattr(content, 'read'): # File-like object
-
- with open(file_path, "wb") as file:
- shutil.copyfileobj(content, file) # Write buff to disk
- else: # Simple string
- if inner_path == "content.json" and os.path.isfile(file_path):
- helper.atomicWrite(file_path, content)
- else:
- with open(file_path, "wb") as file:
- file.write(content)
-
- # Write content to file
- def write(self, inner_path, content):
- self.writeThread(inner_path, content)
- self.onUpdated(inner_path)
-
- # Remove file from filesystem
- def delete(self, inner_path):
- file_path = self.getPath(inner_path)
- os.unlink(file_path)
- self.onUpdated(inner_path, file=False)
-
- def deleteDir(self, inner_path):
- dir_path = self.getPath(inner_path)
- os.rmdir(dir_path)
-
- def rename(self, inner_path_before, inner_path_after):
- for retry in range(3):
- rename_err = None
- # To workaround "The process cannot access the file beacause it is being used by another process." error
- try:
- os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
- break
- except Exception as err:
- rename_err = err
- self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
- time.sleep(0.1 + retry)
- if rename_err:
- raise rename_err
-
- # List files from a directory
- @thread_pool_fs_read.wrap
- def walk(self, dir_inner_path, ignore=None):
- directory = self.getPath(dir_inner_path)
- for root, dirs, files in os.walk(directory):
- root = root.replace("\\", "/")
- root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/")
- for file_name in files:
- if root_relative_path: # Not root dir
- file_relative_path = root_relative_path + "/" + file_name
- else:
- file_relative_path = file_name
-
- if ignore and SafeRe.match(ignore, file_relative_path):
- continue
-
- yield file_relative_path
-
- # Don't scan directories that match the ignore pattern
- if ignore:
- dirs_filtered = []
- for dir_name in dirs:
- if root_relative_path:
- dir_relative_path = root_relative_path + "/" + dir_name
- else:
- dir_relative_path = dir_name
-
- if ignore == ".*" or re.match(".*([|(]|^)%s([|)]|$)" % re.escape(dir_relative_path + "/.*"), ignore):
- continue
-
- dirs_filtered.append(dir_name)
- dirs[:] = dirs_filtered
-
- # List entries of a directory
- @thread_pool_fs_read.wrap
- def list(self, dir_inner_path):
- directory = self.getPath(dir_inner_path)
- return os.listdir(directory)
-
- # Site content updated
- def onUpdated(self, inner_path, file=None):
- # Update Sql cache
- should_load_to_db = inner_path.endswith(".json") or inner_path.endswith(".json.gz")
- if inner_path == "dbschema.json":
- self.has_db = self.isFile("dbschema.json")
- # Reopen DB to check changes
- if self.has_db:
- self.closeDb("New dbschema")
- gevent.spawn(self.getDb)
- elif not config.disable_db and should_load_to_db and self.has_db: # Load json file to db
- if config.verbose:
- self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
- try:
- self.updateDbFile(inner_path, file)
- except Exception as err:
- self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
- self.closeDb("Json load error")
-
- # Load and parse json file
- @thread_pool_fs_read.wrap
- def loadJson(self, inner_path):
- with self.open(inner_path, "r", encoding="utf8") as file:
- return json.load(file)
-
- # Write formatted json file
- def writeJson(self, inner_path, data):
- # Write to disk
- self.write(inner_path, helper.jsonDumps(data).encode("utf8"))
-
- # Get file size
- def getSize(self, inner_path):
- path = self.getPath(inner_path)
- try:
- return os.path.getsize(path)
- except Exception:
- return 0
-
- # File exists
- def isFile(self, inner_path):
- return os.path.isfile(self.getPath(inner_path))
-
- # File or directory exists
- def isExists(self, inner_path):
- return os.path.exists(self.getPath(inner_path))
-
- # Dir exists
- def isDir(self, inner_path):
- return os.path.isdir(self.getPath(inner_path))
-
- # Security check and return path of site's file
- def getPath(self, inner_path):
- inner_path = inner_path.replace("\\", "/") # Windows separator fix
- if not inner_path:
- return self.directory
-
- if "../" in inner_path:
- raise Exception("File not allowed: %s" % inner_path)
-
- return "%s/%s" % (self.directory, inner_path)
-
- # Get site dir relative path
- def getInnerPath(self, path):
- if path == self.directory:
- inner_path = ""
- else:
- if path.startswith(self.directory):
- inner_path = path[len(self.directory) + 1:]
- else:
- raise Exception("File not allowed: %s" % path)
- return inner_path
-
- # Verify all files' sha512 checksums using content.json
- def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True):
- bad_files = []
- back = defaultdict(int)
- back["bad_files"] = bad_files
- i = 0
- self.log.debug("Verifing files...")
-
- if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first
- self.log.debug("VerifyFile content.json not exists")
- self.site.needFile("content.json", update=True) # Force update to fix corrupt file
- self.site.content_manager.loadContent() # Reload content.json
- for content_inner_path, content in list(self.site.content_manager.contents.items()):
- back["num_content"] += 1
- i += 1
- if i % 50 == 0:
- time.sleep(0.001) # Context switch to avoid gevent hangs
- if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
- back["num_content_missing"] += 1
- self.log.debug("[MISSING] %s" % content_inner_path)
- bad_files.append(content_inner_path)
-
- for file_relative_path in list(content.get("files", {}).keys()):
- back["num_file"] += 1
- file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- file_path = self.getPath(file_inner_path)
- if not os.path.isfile(file_path):
- back["num_file_missing"] += 1
- self.log.debug("[MISSING] %s" % file_inner_path)
- bad_files.append(file_inner_path)
- continue
-
- if quick_check:
- ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
- if not ok:
- err = "Invalid size"
- else:
- try:
- ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
- except Exception as verify_err:
- ok = False
- err = verify_err # Python 3 unbinds the except alias after the block, so keep a reference for the log below
-
- if not ok:
- back["num_file_invalid"] += 1
- self.log.debug("[INVALID] %s: %s" % (file_inner_path, err))
- if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files
- bad_files.append(file_inner_path)
-
- # Optional files
- optional_added = 0
- optional_removed = 0
- for file_relative_path in list(content.get("files_optional", {}).keys()):
- back["num_optional"] += 1
- file_node = content["files_optional"][file_relative_path]
- file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
- file_inner_path = file_inner_path.strip("/") # Strip leading /
- file_path = self.getPath(file_inner_path)
- hash_id = self.site.content_manager.hashfield.getHashId(file_node["sha512"])
- if not os.path.isfile(file_path):
- if self.site.content_manager.isDownloaded(file_inner_path, hash_id):
- back["num_optional_removed"] += 1
- self.log.debug("[OPTIONAL MISSING] %s" % file_inner_path)
- self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"])
- if add_optional and self.site.isDownloadable(file_inner_path):
- self.log.debug("[OPTIONAL ADDING] %s" % file_inner_path)
- bad_files.append(file_inner_path)
- continue
-
- if quick_check:
- ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"]
- else:
- try:
- ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
- except Exception as err:
- ok = False
-
- if ok:
- if not self.site.content_manager.isDownloaded(file_inner_path, hash_id):
- back["num_optional_added"] += 1
- self.site.content_manager.optionalDownloaded(file_inner_path, hash_id, file_node["size"])
- optional_added += 1
- self.log.debug("[OPTIONAL FOUND] %s" % file_inner_path)
- else:
- if self.site.content_manager.isDownloaded(file_inner_path, hash_id):
- back["num_optional_removed"] += 1
- self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"])
- optional_removed += 1
- bad_files.append(file_inner_path)
- self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)
-
- if config.verbose:
- self.log.debug(
- "%s verified: %s, quick: %s, optionals: +%s -%s" %
- (content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed)
- )
-
- self.site.content_manager.contents.db.processDelayed()
- time.sleep(0.001) # Context switch to avoid gevent hangs
- return back
-
- # Check and try to fix site files integrity
- def updateBadFiles(self, quick_check=True):
- s = time.time()
- res = self.verifyFiles(
- quick_check,
- add_optional=True,
- add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned
- )
- bad_files = res["bad_files"]
- self.site.bad_files = {}
- if bad_files:
- for bad_file in bad_files:
- self.site.bad_files[bad_file] = 1
- self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check))
-
- # Delete all of the site's files
- @thread_pool_fs_batch.wrap
- def deleteFiles(self):
- site_title = self.site.content_manager.contents.get("content.json", {}).get("title", self.site.address)
- message_id = "delete-%s" % self.site.address
- self.log.debug("Deleting files from content.json (title: %s)..." % site_title)
-
- files = [] # Get filenames
- content_inner_paths = list(self.site.content_manager.contents.keys())
- for i, content_inner_path in enumerate(content_inner_paths):
- content = self.site.content_manager.contents.get(content_inner_path, {})
- files.append(content_inner_path)
- # Add normal files
- for file_relative_path in list(content.get("files", {}).keys()):
- file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
- files.append(file_inner_path)
- # Add optional files
- for file_relative_path in list(content.get("files_optional", {}).keys()):
- file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
- files.append(file_inner_path)
-
- if i % 100 == 0:
- num_files = len(files)
- self.site.messageWebsocket(
- _("Deleting site {site_title} ... Collected {num_files} files"),
- message_id, (i / len(content_inner_paths)) * 25
- )
-
- if self.isFile("dbschema.json"):
- self.log.debug("Deleting db file...")
- self.closeDb("Deleting site")
- self.has_db = False
- try:
- schema = self.loadJson("dbschema.json")
- db_path = self.getPath(schema["db_file"])
- if os.path.isfile(db_path):
- os.unlink(db_path)
- except Exception as err:
- self.log.error("Db file delete error: %s" % err)
-
- num_files = len(files)
- for i, inner_path in enumerate(files):
- path = self.getPath(inner_path)
- if os.path.isfile(path):
- for retry in range(5):
- try:
- os.unlink(path)
- break
- except Exception as err:
- self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry))
- time.sleep(float(retry) / 10)
- if i % 100 == 0:
- self.site.messageWebsocket(
- _("Deleting site {site_title} ... Deleting file {i}/{num_files}"),
- message_id, 25 + (i / num_files) * 50
- )
- self.onUpdated(inner_path, False)
-
- self.log.debug("Deleting empty dirs...")
- i = 0
- for root, dirs, files in os.walk(self.directory, topdown=False):
- for dir in dirs:
- path = os.path.join(root, dir)
- if os.path.isdir(path):
- try:
- i += 1
- if i % 100 == 0:
- self.site.messageWebsocket(
- _("Deleting site {site_title} ... Deleting empty directories {i}"),
- message_id, 85
- )
- os.rmdir(path)
- except OSError: # Not empty
- pass
-
- if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
- os.rmdir(self.directory) # Remove sites directory if empty
-
- if os.path.isdir(self.directory):
- self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
- self.site.messageWebsocket(
- _("Deleting site {site_title} ... Site deleted, but some unknown files left in the directory"),
- message_id, 100
- )
- return False # Some files not deleted
- else:
- self.log.debug("Site %s data directory deleted: %s..." % (site_title, self.directory))
-
- self.site.messageWebsocket(
- _("Deleting site {site_title} ... All files deleted successfully"),
- message_id, 100
- )
-
- return True # All clean
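
getPath() above is the single choke point through which every file access goes, so its rejection of "../" is what keeps requests inside the site's data directory. A minimal sketch of that guard, with an illustrative safe_site_path() name standing in for the method:

def safe_site_path(directory, inner_path):
    inner_path = inner_path.replace("\\", "/")  # Normalize Windows separators
    if not inner_path:
        return directory
    if "../" in inner_path:  # Reject any attempt to climb out of the site dir
        raise ValueError("File not allowed: %s" % inner_path)
    return "%s/%s" % (directory, inner_path)

print(safe_site_path("data/site", "css/all.css"))  # data/site/css/all.css
try:
    safe_site_path("data/site", "../users.json")
except ValueError as err:
    print(err)  # File not allowed: ../users.json
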
diff --git a/src/Site/__init__.py b/src/Site/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py
deleted file mode 100644
index 06181b89..00000000
--- a/src/Test/BenchmarkSsl.py
+++ /dev/null
@@ -1,162 +0,0 @@
-#!/usr/bin/env python3
-from gevent import monkey
-monkey.patch_all()
-import os
-import time
-import sys
-import socket
-import ssl
-sys.path.append(os.path.abspath("..")) # Imports relative to src dir
-
-import io as StringIO
-import gevent
-
-from gevent.server import StreamServer
-from gevent.pool import Pool
-from Config import config
-config.parse()
-from util import SslPatch
-
-# Server
-socks = []
-data = os.urandom(1024 * 100)
-data += "\n"
-
-
-def handle(sock_raw, addr):
- socks.append(sock_raw)
- sock = sock_raw
- # sock = ctx.wrap_socket(sock, server_side=True)
- # if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
- # sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
- # certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
- # fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
- try:
- while True:
- line = sock.recv(16 * 1024)
- if not line:
- break
- if line == "bye\n":
- break
- elif line == "gotssl\n":
- sock.sendall("yes\n")
- sock = gevent.ssl.wrap_socket(
- sock_raw, server_side=True, keyfile='../../data/key-rsa.pem', certfile='../../data/cert-rsa.pem',
- ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
- )
- else:
- sock.sendall(data)
- except Exception as err:
- print(err)
- try:
- sock.shutdown(gevent.socket.SHUT_WR)
- sock.close()
- except:
- pass
- socks.remove(sock_raw)
-
-pool = Pool(1000)  # do not accept more than 1000 connections
-server = StreamServer(('127.0.0.1', 1234), handle)
-server.start()
-
-
-# Client
-
-
-total_num = 0
-total_bytes = 0
-cipher = None
-ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
- "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
-
-# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
-
-
-def getData():
- global total_num, total_bytes, cipher
- data = None
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- # sock = socket.ssl(s)
- # sock = ssl.wrap_socket(sock)
- sock.connect(("127.0.0.1", 1234))
- # sock.do_handshake()
- # cipher = sock.cipher()
- sock.send("gotssl\n")
- if sock.recv(128) == "yes\n":
- sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
- sock.do_handshake()
- cipher = sock.cipher()
-
- for req in range(20):
- sock.sendall("req\n")
- buff = StringIO.StringIO()
- data = sock.recv(16 * 1024)
- buff.write(data)
- if not data:
- break
- while not data.endswith("\n"):
- data = sock.recv(16 * 1024)
- if not data:
- break
- buff.write(data)
- total_num += 1
- total_bytes += buff.tell()
- if not data:
- print("No data")
-
- sock.shutdown(gevent.socket.SHUT_WR)
- sock.close()
-
-s = time.time()
-
-
-def info():
- import psutil
- import os
- process = psutil.Process(os.getpid())
- if "memory_info" in dir(process):
- memory_info = process.memory_info
- else:
- memory_info = process.get_memory_info
- while 1:
- print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ')
- print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20))
- time.sleep(1)
-
-gevent.spawn(info)
-
-for test in range(1):
- clients = []
- for i in range(500): # Thread
- clients.append(gevent.spawn(getData))
- gevent.joinall(clients)
-
-
-print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s)
-
-# Separate client/server process:
-# 10*10*100:
-# Raw: 10000 req 1000009 kbytes transferred in 5.39999985695
-# RSA 2048: 10000 req 1000009 kbytes transferred in 27.7890000343 using ('ECDHE-RSA-AES256-SHA', 'TLSv1/SSLv3', 256)
-# ECC: 10000 req 1000009 kbytes transferred in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
-# ECC: 10000 req 1000009 kbytes transferred in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
-#
-# 10*100*10:
-# Raw: 10000 req 1000009 kbytes transferred in 7.02700018883 Mem: 14.328125
-# RSA 2048: 10000 req 1000009 kbytes transferred in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
-# ECC: 10000 req 1000009 kbytes transferred in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
-#
-# 1*100*100:
-# Raw: 10000 req 1000009 kbytes transferred in 4.64400005341 Mem: 14.06640625
-# RSA: 10000 req 1000009 kbytes transferred in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
-# ECC: 10000 req 1000009 kbytes transferred in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
-# AES128: 10000 req 1000009 kbytes transferred in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
-# ECC+128: 10000 req 1000009 kbytes transferred in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
-#
-#
-# Single process:
-# 1*100*100
-# RSA: 10000 req 1000009 kbytes transferred in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
-#
-# 10*10*100
-# RSA: 10000 req 1000009 kbytes transferred in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875
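
Rough arithmetic on the figures recorded above: each run moved about 1,000,009 kB, so in the separate-process 10*10*100 case the raw sockets sustained roughly 180 MB/s while RSA-2048 TLS managed about 35 MB/s, i.e. encryption cost around 5x in throughput. A couple of lines to reproduce the division:

kbytes = 1000009  # Payload recorded by every run above
for label, seconds in [("raw", 5.39999985695), ("rsa2048", 27.7890000343)]:
    print("%s: %.1f MB/s" % (label, kbytes / 1024.0 / seconds))
# raw: 180.8 MB/s
# rsa2048: 35.1 MB/s
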
diff --git a/src/Test/Spy.py b/src/Test/Spy.py
deleted file mode 100644
index 44422550..00000000
--- a/src/Test/Spy.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import logging
-
-class Spy:
- def __init__(self, obj, func_name):
- self.obj = obj
- self.__name__ = func_name
- self.func_original = getattr(self.obj, func_name)
- self.calls = []
-
- def __enter__(self, *args, **kwargs):
- logging.debug("Spy started")
- def loggedFunc(cls, *args, **kwargs):
- call = dict(enumerate(args, 1))
- call[0] = cls
- call.update(kwargs)
- logging.debug("Spy call: %s" % call)
- self.calls.append(call)
- return self.func_original(cls, *args, **kwargs)
- setattr(self.obj, self.__name__, loggedFunc)
- return self.calls
-
- def __exit__(self, *args, **kwargs):
- setattr(self.obj, self.__name__, self.func_original)
\ No newline at end of file
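
Spy is used as a context manager: it swaps the named method for a wrapper that appends each call's receiver, positional arguments, and keyword arguments to a list, then restores the original on exit. A hypothetical usage sketch (Greeter is a made-up class; Spy is assumed importable from src/Test/Spy.py with src on sys.path):

from Test.Spy import Spy

class Greeter:  # Made-up class for demonstration
    def greet(self, name, punct="!"):
        return "Hello %s%s" % (name, punct)

with Spy(Greeter, "greet") as calls:
    Greeter().greet("world", punct="?")

# Each recorded call maps key 0 to the receiver, 1.. to positional args,
# and keyword arguments by name:
print(calls)  # [{0: <Greeter object ...>, 1: 'world', 'punct': '?'}]
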
diff --git a/src/Test/TestCached.py b/src/Test/TestCached.py
deleted file mode 100644
index 088962c0..00000000
--- a/src/Test/TestCached.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import time
-
-from util import Cached
-
-
-class CachedObject:
- def __init__(self):
- self.num_called_add = 0
- self.num_called_multiply = 0
- self.num_called_none = 0
-
- @Cached(timeout=1)
- def calcAdd(self, a, b):
- self.num_called_add += 1
- return a + b
-
- @Cached(timeout=1)
- def calcMultiply(self, a, b):
- self.num_called_multiply += 1
- return a * b
-
- @Cached(timeout=1)
- def none(self):
- self.num_called_none += 1
- return None
-
-
-class TestCached:
- def testNoneValue(self):
- cached_object = CachedObject()
- assert cached_object.none() is None
- assert cached_object.none() is None
- assert cached_object.num_called_none == 1
- time.sleep(2)
- assert cached_object.none() is None
- assert cached_object.num_called_none == 2
-
- def testCall(self):
- cached_object = CachedObject()
-
- assert cached_object.calcAdd(1, 2) == 3
- assert cached_object.calcAdd(1, 2) == 3
- assert cached_object.calcMultiply(1, 2) == 2
- assert cached_object.calcMultiply(1, 2) == 2
- assert cached_object.num_called_add == 1
- assert cached_object.num_called_multiply == 1
-
- assert cached_object.calcAdd(2, 3) == 5
- assert cached_object.calcAdd(2, 3) == 5
- assert cached_object.num_called_add == 2
-
- assert cached_object.calcAdd(1, 2) == 3
- assert cached_object.calcMultiply(2, 3) == 6
- assert cached_object.num_called_add == 2
- assert cached_object.num_called_multiply == 2
-
- time.sleep(2)
- assert cached_object.calcAdd(1, 2) == 3
- assert cached_object.num_called_add == 3
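
These tests pin down the contract of the Cached decorator: results are memoized per argument tuple, expire after timeout seconds, and a returned None is a cacheable value rather than a cache miss. A minimal re-implementation sketch of that contract (cached() below is illustrative, not ZeroNet's util.Cached):

import functools
import time

def cached(timeout):
    def decorator(func):
        store = {}  # (args, kwargs) -> (expires_at, value)
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            hit = store.get(key)
            if hit and hit[0] > time.time():
                return hit[1]  # Fresh entry; a cached None is returned here too
            value = func(*args, **kwargs)
            store[key] = (time.time() + timeout, value)
            return value
        return wrapper
    return decorator

@cached(timeout=1)
def slow_add(a, b):
    return a + b

assert slow_add(1, 2) == 3  # Computed once, then served from cache for 1s
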
diff --git a/src/Test/TestConfig.py b/src/Test/TestConfig.py
deleted file mode 100644
index 24084392..00000000
--- a/src/Test/TestConfig.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import pytest
-
-import Config
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestConfig:
- def testParse(self):
- # Defaults
- config_test = Config.Config("zeronet.py".split(" "))
- config_test.parse(silent=True, parse_config=False)
- assert not config_test.debug
- assert not config_test.debug_socket
-
- # Test parse command line with unknown parameters (ui_password)
- config_test = Config.Config("zeronet.py --debug --debug_socket --ui_password hello".split(" "))
- config_test.parse(silent=True, parse_config=False)
- assert config_test.debug
- assert config_test.debug_socket
- with pytest.raises(AttributeError):
- config_test.ui_password
-
- # More complex test
- args = "zeronet.py --unknown_arg --debug --debug_socket --ui_restrict 127.0.0.1 1.2.3.4 "
- args += "--another_unknown argument --use_openssl False siteSign address privatekey --inner_path users/content.json"
- config_test = Config.Config(args.split(" "))
- config_test.parse(silent=True, parse_config=False)
- assert config_test.debug
- assert "1.2.3.4" in config_test.ui_restrict
- assert not config_test.use_openssl
- assert config_test.inner_path == "users/content.json"
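
The unknown-flag tolerance testParse exercises (--ui_password and --unknown_arg are accepted silently but never become attributes) can be sketched with argparse's parse_known_args, the standard-library mechanism for setting aside arguments a parser does not define; whether Config uses it internally is not shown here:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--debug", action="store_true")
known, unknown = parser.parse_known_args(["--debug", "--ui_password", "hello"])
assert known.debug
assert unknown == ["--ui_password", "hello"]  # Set aside, not made attributes
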
diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py
deleted file mode 100644
index 82ee605c..00000000
--- a/src/Test/TestConnectionServer.py
+++ /dev/null
@@ -1,118 +0,0 @@
-import time
-import socket
-import gevent
-
-import pytest
-import mock
-
-from Crypt import CryptConnection
-from Connection import ConnectionServer
-from Config import config
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestConnection:
- def testIpv6(self, file_server6):
- assert ":" in file_server6.ip
-
- client = ConnectionServer(file_server6.ip, 1545)
- connection = client.getConnection(file_server6.ip, 1544)
-
- assert connection.ping()
-
- # Close connection
- connection.close()
- client.stop()
- time.sleep(0.01)
- assert len(file_server6.connections) == 0
-
- # Should not be able to reach it on the IPv4 ip
- with pytest.raises(socket.error) as err:
- client = ConnectionServer("127.0.0.1", 1545)
- connection = client.getConnection("127.0.0.1", 1544)
-
- def testSslConnection(self, file_server):
- client = ConnectionServer(file_server.ip, 1545)
- assert file_server != client
-
- # Connect to myself
- with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips
- connection = client.getConnection(file_server.ip, 1544)
-
- assert len(file_server.connections) == 1
- assert connection.handshake
- assert connection.crypt
-
-
- # Close connection
- connection.close("Test ended")
- client.stop()
- time.sleep(0.1)
- assert len(file_server.connections) == 0
- assert file_server.num_incoming == 2 # One for file_server fixture, one for the test
-
- def testRawConnection(self, file_server):
- client = ConnectionServer(file_server.ip, 1545)
- assert file_server != client
-
- # Remove all supported crypto
- crypt_supported_bk = CryptConnection.manager.crypt_supported
- CryptConnection.manager.crypt_supported = []
-
- with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips
- connection = client.getConnection(file_server.ip, 1544)
- assert len(file_server.connections) == 1
- assert not connection.crypt
-
- # Close connection
- connection.close()
- client.stop()
- time.sleep(0.01)
- assert len(file_server.connections) == 0
-
- # Reset supported crypts
- CryptConnection.manager.crypt_supported = crypt_supported_bk
-
- def testPing(self, file_server, site):
- client = ConnectionServer(file_server.ip, 1545)
- connection = client.getConnection(file_server.ip, 1544)
-
- assert connection.ping()
-
- connection.close()
- client.stop()
-
- def testGetConnection(self, file_server):
- client = ConnectionServer(file_server.ip, 1545)
- connection = client.getConnection(file_server.ip, 1544)
-
- # Get connection by ip/port
- connection2 = client.getConnection(file_server.ip, 1544)
- assert connection == connection2
-
- # Get connection by peerid
- assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False)
- connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False)
- assert connection2 == connection
-
- connection.close()
- client.stop()
-
- def testFloodProtection(self, file_server):
- whitelist = file_server.whitelist # Save for reset
- file_server.whitelist = [] # Disable 127.0.0.1 whitelist
- client = ConnectionServer(file_server.ip, 1545)
-
- # Only allow 6 connections in 1 minute
- for reconnect in range(6):
- connection = client.getConnection(file_server.ip, 1544)
- assert connection.handshake
- connection.close()
-
- # The 7th one will time out
- with pytest.raises(gevent.Timeout):
- with gevent.Timeout(0.1):
- connection = client.getConnection(file_server.ip, 1544)
-
- # Reset whitelist
- file_server.whitelist = whitelist
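
testFloodProtection relies on the file server refusing more than 6 new connections from one IP within a minute. A hedged sketch of such a rolling-window limiter (FloodGuard and its names are illustrative, not ZeroNet's implementation):

import time
from collections import defaultdict, deque

class FloodGuard:
    def __init__(self, limit=6, window=60):
        self.limit, self.window = limit, window
        self.hits = defaultdict(deque)  # ip -> timestamps of recent connects

    def allow(self, ip):
        now = time.time()
        q = self.hits[ip]
        while q and q[0] < now - self.window:
            q.popleft()  # Drop hits that fell out of the rolling window
        if len(q) >= self.limit:
            return False
        q.append(now)
        return True

guard = FloodGuard()
results = [guard.allow("127.0.0.1") for _ in range(7)]
assert results == [True] * 6 + [False]  # The 7th attempt is refused
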
diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py
deleted file mode 100644
index 7e7ca1a5..00000000
--- a/src/Test/TestContent.py
+++ /dev/null
@@ -1,273 +0,0 @@
-import json
-import time
-import io
-
-import pytest
-
-from Crypt import CryptBitcoin
-from Content.ContentManager import VerifyError, SignError
-from util.SafeRe import UnsafePatternError
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestContent:
- privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
-
- def testInclude(self, site):
- # Rules defined in parent content.json
- rules = site.content_manager.getRules("data/test_include/content.json")
-
- assert rules["signers"] == ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"] # Valid signer
- assert rules["user_name"] == "test" # Extra data
- assert rules["max_size"] == 20000 # Max size of files
- assert not rules["includes_allowed"] # Don't allow more includes
- assert rules["files_allowed"] == "data.json" # Allowed file pattern
-
- # Valid signers for "data/test_include/content.json"
- valid_signers = site.content_manager.getValidSigners("data/test_include/content.json")
- assert "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" in valid_signers # Extra valid signer defined in parent content.json
- assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
- assert len(valid_signers) == 2 # No more
-
- # Valid signers for "data/users/content.json"
- valid_signers = site.content_manager.getValidSigners("data/users/content.json")
- assert "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" in valid_signers # Extra valid signer defined in parent content.json
- assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
- assert len(valid_signers) == 2
-
- # Valid signers for root content.json
- assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
-
- def testIncludeLimits(self, site, crypt_bitcoin_lib):
- # Data validation
- res = []
- data_dict = {
- "files": {
- "data.json": {
- "sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
- "size": 505
- }
- },
- "modified": time.time()
- }
-
- # Normal data
- data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
- data_json = json.dumps(data_dict).encode()
- data = io.BytesIO(data_json)
- assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
-
- # Reset
- del data_dict["signs"]
-
- # Too large
- data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json
- data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
- assert "Include too large" in str(err.value)
-
- # Reset
- data_dict["files"]["data.json"]["size"] = 505
- del data_dict["signs"]
-
- # Not allowed file
- data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
- data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
- assert "File not allowed" in str(err.value)
-
- # Reset
- del data_dict["files"]["notallowed.exe"]
- del data_dict["signs"]
-
- # Should work again
- data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
-
- @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
- def testSign(self, site, inner_path):
- # Bad privatekey
- with pytest.raises(SignError) as err:
- site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
- assert "Private key invalid" in str(err.value)
-
- # Good privatekey
- content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False)
- content_old = site.content_manager.contents[inner_path] # Content before the sign
- assert content_old != content # Timestamp changed
- assert site.address in content["signs"] # Used the site's private key to sign
- if inner_path == "content.json":
- assert len(content["files"]) == 17
- elif inner_path == "data/test-include/content.json":
- assert len(content["files"]) == 1
- elif inner_path == "data/users/content.json":
- assert len(content["files"]) == 0
-
- # Everything should be same as before except the modified timestamp and the signs
- assert (
- {key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
- ==
- {key: val for key, val in content.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
- )
-
- def testSignOptionalFiles(self, site):
- for hash in list(site.content_manager.hashfield):
- site.content_manager.hashfield.remove(hash)
-
- assert len(site.content_manager.hashfield) == 0
-
- site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
- content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
-
- del site.content_manager.contents["content.json"]["optional"]
- content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
-
- assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern
- assert len(content_optional["files_optional"]) > 0
- assert len(site.content_manager.hashfield) == len(content_optional["files_optional"]) # Hashed optional files should be added to hashfield
- assert len(content_nooptional["files"]) > len(content_optional["files"])
-
- def testFileInfo(self, site):
- assert "sha512" in site.content_manager.getFileInfo("index.html")
- assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json"
- assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json"
- assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json"
- assert not site.content_manager.getFileInfo("notexist")
-
- # Optional file
- file_info_optional = site.content_manager.getFileInfo("data/optional.txt")
- assert "sha512" in file_info_optional
- assert file_info_optional["optional"] is True
-
- # User content.json that does not exist yet
- assert "cert_signers" in site.content_manager.getFileInfo("data/users/unknown/content.json")
-
- # Optional user file
- file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- assert "sha512" in file_info_optional
- assert file_info_optional["optional"] is True
-
- def testVerify(self, site, crypt_bitcoin_lib):
- inner_path = "data/test_include/content.json"
- data_dict = site.storage.loadJson(inner_path)
- data = io.BytesIO(json.dumps(data_dict).encode("utf8"))
-
- # Re-sign
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
- }
- assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
-
- # Wrong address
- data_dict["address"] = "Othersite"
- del data_dict["signs"]
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(inner_path, data, ignore_same=False)
- assert "Wrong site address" in str(err.value)
-
- # Wrong inner_path
- data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
- data_dict["inner_path"] = "content.json"
- del data_dict["signs"]
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(inner_path, data, ignore_same=False)
- assert "Wrong inner_path" in str(err.value)
-
- # Everything right again
- data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
- data_dict["inner_path"] = inner_path
- del data_dict["signs"]
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
-
- def testVerifyInnerPath(self, site, crypt_bitcoin_lib):
- inner_path = "content.json"
- data_dict = site.storage.loadJson(inner_path)
-
- for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]:
- data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}
-
- if "sign" in data_dict:
- del data_dict["sign"]
- del data_dict["signs"]
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
-
- for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]:
- data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}
-
- if "sign" in data_dict:
- del data_dict["sign"]
- del data_dict["signs"]
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(inner_path, data, ignore_same=False)
- assert "Invalid relative path" in str(err.value)
-
- @pytest.mark.parametrize("key", ["ignore", "optional"])
- def testSignUnsafePattern(self, site, key):
- site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*"
- with pytest.raises(UnsafePatternError) as err:
- site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False)
- assert "Potentially unsafe" in str(err.value)
-
-
- def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib):
- site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*"
- with pytest.raises(UnsafePatternError) as err:
- with site.storage.open("data/test_include/content.json") as data:
- site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
- assert "Potentially unsafe" in str(err.value)
-
- site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0}
- with pytest.raises(UnsafePatternError) as err:
- with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data:
- site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False)
- assert "Potentially unsafe" in str(err.value)
-
- def testPathValidation(self, site):
- assert site.content_manager.isValidRelativePath("test.txt")
- assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt")
- assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt")
- assert site.content_manager.isValidRelativePath("тест.текст")
- assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆")
-
- # Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names
-
- assert not site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed
- assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with /
- assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \
- assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. in path
- assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character
- assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters)
- assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space
- assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot
- assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot
- assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows
- assert not site.content_manager.isValidRelativePath("CON/any.txt")
- assert not site.content_manager.isValidRelativePath("any/lpt1.txt")
- assert site.content_manager.isValidRelativePath("any/CONAN")
- assert not site.content_manager.isValidRelativePath("any/CONOUT$")
- assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed
diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py
deleted file mode 100644
index 8e91dd3e..00000000
--- a/src/Test/TestContentUser.py
+++ /dev/null
@@ -1,390 +0,0 @@
-import json
-import io
-
-import pytest
-
-from Crypt import CryptBitcoin
-from Content.ContentManager import VerifyError, SignError
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestContentUser:
- def testSigners(self, site):
- # File info for a non-existent user file
- file_info = site.content_manager.getFileInfo("data/users/notexist/data.json")
- assert file_info["content_inner_path"] == "data/users/notexist/content.json"
- file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json")
- assert file_info["content_inner_path"] == "data/users/notexist/content.json"
- valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json")
- assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
-
- # File info for existing user file
- valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
- assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address
- assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json
- assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers # The user itself
- assert len(valid_signers) == 3 # No more valid signers
-
- # Valid signer for banned user
- user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
- user_content["cert_user_id"] = "bad@zeroid.bit"
-
- valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
- assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address
- assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json
- assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers # The user itself
-
- def testRules(self, site):
- # We are going to manipulate the rules in this test, based on data/users/content.json
- user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
-
- # Known user
- user_content["cert_auth_type"] = "web"
- user_content["cert_user_id"] = "nofish@zeroid.bit"
- rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
- assert rules["max_size"] == 100000
- assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]
-
- # Unknown user
- user_content["cert_auth_type"] = "web"
- user_content["cert_user_id"] = "noone@zeroid.bit"
- rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
- assert rules["max_size"] == 10000
- assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]
-
- # User with more size limit based on auth type
- user_content["cert_auth_type"] = "bitmsg"
- user_content["cert_user_id"] = "noone@zeroid.bit"
- rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
- assert rules["max_size"] == 15000
- assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]
-
- # Banned user
- user_content["cert_auth_type"] = "web"
- user_content["cert_user_id"] = "bad@zeroid.bit"
- rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
- assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"]
-
- def testRulesAddress(self, site):
- user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
- user_content = site.storage.loadJson(user_inner_path)
-
- rules = site.content_manager.getRules(user_inner_path, user_content)
- assert rules["max_size"] == 10000
- assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"]
-
- users_content = site.content_manager.contents["data/users/content.json"]
-
- # Ban user based on address
- users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False
- rules = site.content_manager.getRules(user_inner_path, user_content)
- assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"]
-
- # Change max allowed size
- users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
- rules = site.content_manager.getRules(user_inner_path, user_content)
- assert rules["max_size"] == 20000
-
- def testVerifyAddress(self, site):
- privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
- user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
- data_dict = site.storage.loadJson(user_inner_path)
- users_content = site.content_manager.contents["data/users/content.json"]
-
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
-
- # Test error on 15k data.json
- data_dict["files"]["data.json"]["size"] = 1024 * 15
- del data_dict["signs"] # Remove signs before signing
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
- assert "Include too large" in str(err.value)
-
- # Give more space based on address
- users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
- del data_dict["signs"] # Remove signs before signing
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
-
- def testVerify(self, site):
- privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
- user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
- data_dict = site.storage.loadJson(user_inner_path)
- users_content = site.content_manager.contents["data/users/content.json"]
-
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
-
- # Test max size exception by setting allowed to 0
- rules = site.content_manager.getRules(user_inner_path, data_dict)
- assert rules["max_size"] == 10000
- assert users_content["user_contents"]["permission_rules"][".*"]["max_size"] == 10000
-
- users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0
- rules = site.content_manager.getRules(user_inner_path, data_dict)
- assert rules["max_size"] == 0
- data = io.BytesIO(json.dumps(data_dict).encode())
-
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
- assert "Include too large" in str(err.value)
- users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset
-
- # Test max optional size exception
- # 1 MB gif = Allowed
- data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024
- del data_dict["signs"] # Remove signs before signing
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
-
- # 100 MB gif = Not allowed
- data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 100 * 1024 * 1024
- del data_dict["signs"] # Remove signs before signing
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
- assert "Include optional files too large" in str(err.value)
- data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset
-
- # hello.exe = Not allowed
- data_dict["files_optional"]["hello.exe"] = data_dict["files_optional"]["peanut-butter-jelly-time.gif"]
- del data_dict["signs"] # Remove signs before signing
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
- assert "Optional file not allowed" in str(err.value)
- del data_dict["files_optional"]["hello.exe"] # Reset
-
- # Includes not allowed in user content
- data_dict["includes"] = {"other.json": {}}
- del data_dict["signs"] # Remove signs before signing
- data_dict["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
- }
- data = io.BytesIO(json.dumps(data_dict).encode())
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
- assert "Includes not allowed" in str(err.value)
-
- def testCert(self, site):
- # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"
- user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
- # cert_addr = "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
- cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"
-
- # Check if the user file is loaded
- assert "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" in site.content_manager.contents
- user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
- rules_content = site.content_manager.contents["data/users/content.json"]
-
- # Override valid cert signers for the test
- rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
- "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
- "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
- ]
-
- # Check valid cert signers
- rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
- assert rules["cert_signers"] == {"zeroid.bit": [
- "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
- "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
- ]}
-
- # Sign a valid cert
- user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
- user_content["cert_auth_type"],
- user_content["cert_user_id"].split("@")[0]
- ), cert_priv)
-
- # Verify cert
- assert site.content_manager.verifyCert("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
-
- # The cert must not be valid for another address
- assert not site.content_manager.verifyCert("data/users/badaddress/content.json", user_content)
-
- # Sign user content
- signed_content = site.content_manager.sign(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
- )
-
- # Test user cert
- assert site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
-
- # Test banned user
- cert_user_id = user_content["cert_user_id"] # My username
- site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
- assert "Valid signs: 0/1" in str(err.value)
- del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset
-
- # Test invalid cert
- user_content["cert_sign"] = CryptBitcoin.sign(
- "badaddress#%s/%s" % (user_content["cert_auth_type"], user_content["cert_user_id"]), cert_priv
- )
- signed_content = site.content_manager.sign(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
- )
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
- assert "Invalid cert" in str(err.value)
-
- # Test banned user, signed by the site owner
- user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
- user_content["cert_auth_type"],
- user_content["cert_user_id"].split("@")[0]
- ), cert_priv)
- cert_user_id = user_content["cert_user_id"] # My username
- site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False
-
- site_privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
- del user_content["signs"] # Remove signs before signing
- user_content["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), site_privatekey)
- }
- assert site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(user_content).encode()), ignore_same=False
- )
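(Annotation: for reference, the cert constructed above is just a Bitcoin-signed string binding the user's site address to an auth type and username. A sketch of both sides under that assumption; CryptBitcoin.sign/verify are the same calls used in these tests.)

from Crypt import CryptBitcoin

def make_cert(user_address, cert_auth_type, username, cert_privatekey):
    # Payload format used throughout these tests:
    # "<user site address>#<auth type>/<username>"
    payload = "%s#%s/%s" % (user_address, cert_auth_type, username)
    return CryptBitcoin.sign(payload, cert_privatekey)

def verify_cert(user_address, cert_auth_type, username, cert_sign, cert_signers):
    payload = "%s#%s/%s" % (user_address, cert_auth_type, username)
    # Valid if any allowed cert signer address produced the signature
    return any(CryptBitcoin.verify(payload, signer, cert_sign) for signer in cert_signers)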
-
- def testMissingCert(self, site):
- user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
- cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"
-
- user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
- rules_content = site.content_manager.contents["data/users/content.json"]
-
- # Override valid cert signers for the test
- rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
- "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
- "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
- ]
-
- # Sign a valid cert
- user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
- user_content["cert_auth_type"],
- user_content["cert_user_id"].split("@")[0]
- ), cert_priv)
- signed_content = site.content_manager.sign(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
- )
-
- assert site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
-
- # Test invalid cert_user_id
- user_content["cert_user_id"] = "nodomain"
- user_content["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
- }
- signed_content = site.content_manager.sign(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
- )
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
- assert "Invalid domain in cert_user_id" in str(err.value)
-
- # Test removed cert
- del user_content["cert_user_id"]
- del user_content["cert_auth_type"]
- del user_content["signs"] # Remove signs before signing
- user_content["signs"] = {
- "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
- }
- signed_content = site.content_manager.sign(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
- )
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
- assert "Missing cert_user_id" in str(err.value)
-
-
- def testCertSignersPattern(self, site):
- user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
- cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA" # For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet
-
- user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
- rules_content = site.content_manager.contents["data/users/content.json"]
-
- # Override valid cert signers for the test
- rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]"
-
- # Sign a valid cert
- user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
- user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
- user_content["cert_auth_type"],
- "certuser"
- ), cert_priv)
- signed_content = site.content_manager.sign(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
- )
-
- assert site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
-
- # Cert does not match the pattern
- rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]"
-
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
- assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)
-
- # Removed cert_signers_pattern
- del rules_content["user_contents"]["cert_signers_pattern"]
-
- with pytest.raises(VerifyError) as err:
- site.content_manager.verifyFile(
- "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
- io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
- )
- assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)
-
-
- def testNewFile(self, site):
- privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
- inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"
-
- site.storage.writeJson(inner_path, {"test": "data"})
- site.content_manager.sign(inner_path, privatekey)
- assert "test" in site.storage.loadJson(inner_path)
-
- site.storage.delete(inner_path)
diff --git a/src/Test/TestCryptBitcoin.py b/src/Test/TestCryptBitcoin.py
deleted file mode 100644
index 2bc087b5..00000000
--- a/src/Test/TestCryptBitcoin.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from Crypt import CryptBitcoin
-
-
-class TestCryptBitcoin:
- def testSign(self, crypt_bitcoin_lib):
- privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C"
- privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC"
-
- # Get address by privatekey
- address = crypt_bitcoin_lib.privatekeyToAddress(privatekey)
- assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"
-
- address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad)
- assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"
-
- # Text signing
- data_len_list = list(range(0, 300, 10))
- data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048]
- for data_len in data_len_list:
- data = data_len * "!"
- sign = crypt_bitcoin_lib.sign(data, privatekey)
-
- assert crypt_bitcoin_lib.verify(data, address, sign)
- assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign)
-
- # Signed by bad privatekey
- sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad)
- assert not crypt_bitcoin_lib.verify("hello", address, sign_bad)
-
- def testVerify(self, crypt_bitcoin_lib):
- sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
- assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed)
-
- sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
- assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed)
-
- def testNewPrivatekey(self):
- assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey()
- assert CryptBitcoin.privatekeyToAddress(CryptBitcoin.newPrivatekey())
-
- def testNewSeed(self):
- assert CryptBitcoin.newSeed() != CryptBitcoin.newSeed()
- assert CryptBitcoin.privatekeyToAddress(
- CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 0)
- )
- assert CryptBitcoin.privatekeyToAddress(
- CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 2**256)
- )
diff --git a/src/Test/TestCryptConnection.py b/src/Test/TestCryptConnection.py
deleted file mode 100644
index 46d2affc..00000000
--- a/src/Test/TestCryptConnection.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-
-from Config import config
-from Crypt import CryptConnection
-
-
-class TestCryptConnection:
- def testSslCert(self):
- # Remove old certs
- if os.path.isfile("%s/cert-rsa.pem" % config.data_dir):
- os.unlink("%s/cert-rsa.pem" % config.data_dir)
- if os.path.isfile("%s/key-rsa.pem" % config.data_dir):
- os.unlink("%s/key-rsa.pem" % config.data_dir)
-
- # Generate certs
- CryptConnection.manager.loadCerts()
-
- assert "tls-rsa" in CryptConnection.manager.crypt_supported
- assert CryptConnection.manager.selectCrypt(["tls-rsa", "unknown"]) == "tls-rsa" # It should choose the known crypt
-
- # Check openssl cert generation
- assert os.path.isfile("%s/cert-rsa.pem" % config.data_dir)
- assert os.path.isfile("%s/key-rsa.pem" % config.data_dir)
diff --git a/src/Test/TestCryptHash.py b/src/Test/TestCryptHash.py
deleted file mode 100644
index b91dbcca..00000000
--- a/src/Test/TestCryptHash.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import base64
-
-from Crypt import CryptHash
-
-sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d"
-sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d"
-sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3"
-
-
- class TestCryptHash:
-
- def testSha(self, site):
- file_path = site.storage.getPath("dbschema.json")
- assert CryptHash.sha512sum(file_path) == sha512t_sum_hex
- assert CryptHash.sha512sum(open(file_path, "rb")) == sha512t_sum_hex
- assert CryptHash.sha512sum(open(file_path, "rb"), format="digest") == sha512t_sum_bin
-
- assert CryptHash.sha256sum(file_path) == sha256_sum_hex
- assert CryptHash.sha256sum(open(file_path, "rb")) == sha256_sum_hex
-
- with open(file_path, "rb") as f:
- hash = CryptHash.Sha512t(f.read(100))
- assert hash.hexdigest() != sha512t_sum_hex # Only the first 100 bytes hashed so far
- hash.update(f.read(1024 * 1024))
- assert hash.hexdigest() == sha512t_sum_hex
-
- def testRandom(self):
- assert len(CryptHash.random(64)) == 64
- assert CryptHash.random() != CryptHash.random()
- assert bytes.fromhex(CryptHash.random(encoding="hex"))
- assert base64.b64decode(CryptHash.random(encoding="base64"))
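(Annotation: the 64-hex-digit sums above are 32 bytes, i.e. SHA-512 truncated to 256 bits; a minimal hashlib equivalent, assuming that is all Sha512t does.)

import hashlib

def sha512t_hexdigest(data: bytes) -> str:
    # SHA-512, keeping only the first 256 bits (32 bytes / 64 hex digits)
    return hashlib.sha512(data).hexdigest()[:64]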
diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py
deleted file mode 100644
index 67f383a3..00000000
--- a/src/Test/TestDb.py
+++ /dev/null
@@ -1,137 +0,0 @@
-import io
-
-
-class TestDb:
- def testCheckTables(self, db):
- tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
- assert "keyvalue" in tables # To store simple key -> value
- assert "json" in tables # Json file path registry
- assert "test" in tables # The table defined in dbschema.json
-
- # Verify test table
- cols = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
- assert "test_id" in cols
- assert "title" in cols
-
- # Add new table
- assert "newtest" not in tables
- db.schema["tables"]["newtest"] = {
- "cols": [
- ["newtest_id", "INTEGER"],
- ["newtitle", "TEXT"],
- ],
- "indexes": ["CREATE UNIQUE INDEX newtest_id ON newtest(newtest_id)"],
- "schema_changed": 1426195822
- }
- db.checkTables()
- tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
- assert "test" in tables
- assert "newtest" in tables
-
- def testQueries(self, db):
- # Test insert
- for i in range(100):
- db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test #%s" % i})
-
- assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 100
-
- # Test single select
- assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1
-
- # Test multiple select
- assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"test_id": [1, 2, 3], "title": "Test #2"}
- ).fetchone()["num"] == 1
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
- ).fetchone()["num"] == 2
-
- # Test multiple select using named params
- assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title",
- {"test_id": [1, 2, 3], "title": "Test #2"}
- ).fetchone()["num"] == 1
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title IN :title",
- {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
- ).fetchone()["num"] == 2
-
- # Large number of IN values
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"not__test_id": list(range(2, 3000))}
- ).fetchone()["num"] == 2
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"test_id": list(range(50, 3000))}
- ).fetchone()["num"] == 50
-
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"not__title": ["Test #%s" % i for i in range(50, 3000)]}
- ).fetchone()["num"] == 50
-
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"title__like": "%20%"}
- ).fetchone()["num"] == 1
-
- # Test named parameter escaping
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike",
- {"test_id": 1, "titlelike": "Test%"}
- ).fetchone()["num"] == 1
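(Annotation: a hedged sketch of the WHERE-dict expansion these queries rely on: list values become IN (...), a not__ prefix negates, a __like suffix switches to LIKE. Names are inferred from the assertions, not the actual Db code, and a real implementation must also work around SQLite's ~999 bound-parameter limit for the 3000-value lists above.)

def build_where(params):
    # {"test_id": [1, 2], "not__title": "x", "title__like": "%y%"} ->
    # "test_id IN (?, ?) AND title != ? AND title LIKE ?", [1, 2, "x", "%y%"]
    conds, values = [], []
    for key, val in params.items():
        negate = key.startswith("not__")
        col = key[5:] if negate else key
        if col.endswith("__like"):
            conds.append("%s %sLIKE ?" % (col[:-6], "NOT " if negate else ""))
            values.append(val)
        elif isinstance(val, (list, tuple)):
            marks = ", ".join("?" * len(val))
            conds.append("%s %sIN (%s)" % (col, "NOT " if negate else "", marks))
            values.extend(val)
        else:
            conds.append("%s %s ?" % (col, "!=" if negate else "="))
            values.append(val)
    return " AND ".join(conds), values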
-
- def testEscaping(self, db):
- # Test insert
- for i in range(100):
- db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i})
-
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"title": "Test '\" #1"}
- ).fetchone()["num"] == 1
-
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"title": ["Test '\" #%s" % i for i in range(0, 50)]}
- ).fetchone()["num"] == 50
-
- assert db.execute(
- "SELECT COUNT(*) AS num FROM test WHERE ?",
- {"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]}
- ).fetchone()["num"] == 50
-
-
- def testUpdateJson(self, db):
- f = io.BytesIO()
- f.write("""
- {
- "test": [
- {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
- ]
- }
- """.encode())
- f.seek(0)
- assert db.updateJson(db.db_dir + "data.json", f) is True
- assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1
- assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1
-
- def testUnsafePattern(self, db):
- db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . supported
- f = io.StringIO()
- f.write("""
- {
- "test": [
- {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
- ]
- }
- """)
- f.seek(0)
- assert db.updateJson(db.db_dir + "data.json", f) is False
- assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0
- assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0
diff --git a/src/Test/TestDbQuery.py b/src/Test/TestDbQuery.py
deleted file mode 100644
index 597bc950..00000000
--- a/src/Test/TestDbQuery.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import re
-
-from Db.DbQuery import DbQuery
-
-
-class TestDbQuery:
- def testParse(self):
- query_text = """
- SELECT
- 'comment' AS type,
- date_added, post.title AS title,
- keyvalue.value || ': ' || comment.body AS body,
- '?Post:' || comment.post_id || '#Comments' AS url
- FROM
- comment
- LEFT JOIN json USING (json_id)
- LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json')
- LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id')
- LEFT JOIN post ON (comment.post_id = post.post_id)
- WHERE
- post.date_added > 123
- ORDER BY
- date_added DESC
- LIMIT 20
- """
- query = DbQuery(query_text)
- assert query.parts["LIMIT"] == "20"
- assert query.fields["body"] == "keyvalue.value || ': ' || comment.body"
- assert re.sub("[ \r\n]", "", str(query)) == re.sub("[ \r\n]", "", query_text)
- query.wheres.append("body LIKE '%hello%'")
- assert "body LIKE '%hello%'" in str(query)
diff --git a/src/Test/TestDebug.py b/src/Test/TestDebug.py
deleted file mode 100644
index e3eb20b3..00000000
--- a/src/Test/TestDebug.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from Debug import Debug
-import gevent
-import os
-import re
-
-import pytest
-
-
-class TestDebug:
- @pytest.mark.parametrize("items,expected", [
- (["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]), # basic test
- (["@/src/Db/Db.py:17"], ["Db.py line 17"]), # path compression
- (["%s:1" % __file__], ["TestDebug.py line 1"]),
- (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]), # plugins
- (["@/main.py:17"], ["main.py line 17"]), # root
- (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]), # Windows paths
- ([":1"], []), # importlib builtins
- ([":1"], []), # importlib builtins
- (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]), # best-effort anonymization
- (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]),
- (["/root/main.py:17"], ["/root/main.py line 17"]),
- (["{gevent}:13"], ["/__init__.py line 13"]), # modules
- (["{os}:13"], [" line 13"]), # python builtin modules
- (["src/gevent/event.py:17"], ["/event.py line 17"]), # gevent-overriden __file__
- (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]), # mutliple args
- (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]), # same file
- (["{os}:1", "@/src/Db/Db.py:17"], [" line 1", "Db.py line 17"]), # builtins
- (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["/__init__.py line 1", "...", "Db.py line 17"])
- ])
- def testFormatTraceback(self, items, expected):
- q_items = []
- for item in items:
- file, line = item.rsplit(":", 1)
- if file.startswith("@"):
- file = Debug.root_dir + file[1:]
- file = file.replace("{os}", os.__file__)
- file = file.replace("{gevent}", gevent.__file__)
- q_items.append((file, int(line)))
- assert Debug.formatTraceback(q_items) == expected
-
- def testFormatException(self):
- try:
- raise ValueError("Test exception")
- except Exception:
- assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException())
- try:
- os.path.abspath(1)
- except Exception:
- assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException())
-
- def testFormatStack(self):
- assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack())
diff --git a/src/Test/TestDiff.py b/src/Test/TestDiff.py
deleted file mode 100644
index 622951a1..00000000
--- a/src/Test/TestDiff.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import io
-
-from util import Diff
-
-
-class TestDiff:
- def testDiff(self):
- assert Diff.diff(
- [],
- ["one", "two", "three"]
- ) == [("+", ["one", "two","three"])]
-
- assert Diff.diff(
- ["one", "two", "three"],
- ["one", "two", "three", "four", "five"]
- ) == [("=", 11), ("+", ["four", "five"])]
-
- assert Diff.diff(
- ["one", "two", "three", "six"],
- ["one", "two", "three", "four", "five", "six"]
- ) == [("=", 11), ("+", ["four", "five"]), ("=", 3)]
-
- assert Diff.diff(
- ["one", "two", "three", "hmm", "six"],
- ["one", "two", "three", "four", "five", "six"]
- ) == [("=", 11), ("-", 3), ("+", ["four", "five"]), ("=", 3)]
-
- assert Diff.diff(
- ["one", "two", "three"],
- []
- ) == [("-", 11)]
-
- def testUtf8(self):
- assert Diff.diff(
- ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"],
- ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"]
- ) == [("=", 20), ("+", ["four", "five"])]
-
- def testDiffLimit(self):
- old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
- new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
- actions = Diff.diff(list(old_f), list(new_f), limit=1024)
- assert actions
-
- old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
- new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024)
- actions = Diff.diff(list(old_f), list(new_f), limit=1024)
- assert actions is False
-
- def testPatch(self):
- old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
- new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
- actions = Diff.diff(
- list(old_f),
- list(new_f)
- )
- old_f.seek(0)
- assert Diff.patch(old_f, actions).getvalue() == new_f.getvalue()
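(Annotation: the action tuples asserted above encode kept and removed spans as character counts and insertions as line lists; a minimal patch() consistent with those assertions, not necessarily the real util.Diff code.)

import io

def patch(old_f, actions):
    # ("=", n): copy n bytes from old_f; ("-", n): skip n removed bytes;
    # ("+", lines): write the new lines verbatim
    out = io.BytesIO()
    for action, param in actions:
        if action == "=":
            out.write(old_f.read(param))
        elif action == "-":
            old_f.seek(param, 1)
        else:  # "+"
            for line in param:
                out.write(line if isinstance(line, bytes) else line.encode())
    out.seek(0)
    return out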
diff --git a/src/Test/TestEvent.py b/src/Test/TestEvent.py
deleted file mode 100644
index 8bdafaaa..00000000
--- a/src/Test/TestEvent.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import util
-
-
-class ExampleClass(object):
- def __init__(self):
- self.called = []
- self.onChanged = util.Event()
-
- def increment(self, title):
- self.called.append(title)
-
-
-class TestEvent:
- def testEvent(self):
- test_obj = ExampleClass()
- test_obj.onChanged.append(lambda: test_obj.increment("Called #1"))
- test_obj.onChanged.append(lambda: test_obj.increment("Called #2"))
- test_obj.onChanged.once(lambda: test_obj.increment("Once"))
-
- assert test_obj.called == []
- test_obj.onChanged()
- assert test_obj.called == ["Called #1", "Called #2", "Once"]
- test_obj.onChanged()
- test_obj.onChanged()
- assert test_obj.called == ["Called #1", "Called #2", "Once", "Called #1", "Called #2", "Called #1", "Called #2"]
-
- def testOnce(self):
- test_obj = ExampleClass()
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #1"))
-
- # It should be called only once
- assert test_obj.called == []
- test_obj.onChanged()
- assert test_obj.called == ["Once test #1"]
- test_obj.onChanged()
- test_obj.onChanged()
- assert test_obj.called == ["Once test #1"]
-
- def testOnceMultiple(self):
- test_obj = ExampleClass()
- # Allow queueing more than one once() handler
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #1"))
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #2"))
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #3"))
-
- assert test_obj.called == []
- test_obj.onChanged()
- assert test_obj.called == ["Once test #1", "Once test #2", "Once test #3"]
- test_obj.onChanged()
- test_obj.onChanged()
- assert test_obj.called == ["Once test #1", "Once test #2", "Once test #3"]
-
- def testOnceNamed(self):
- test_obj = ExampleClass()
- # Don't store more than one handler of the same type
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #1/1"), "type 1")
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #1/2"), "type 1")
- test_obj.onChanged.once(lambda: test_obj.increment("Once test #2"), "type 2")
-
- assert test_obj.called == []
- test_obj.onChanged()
- assert test_obj.called == ["Once test #1/1", "Once test #2"]
- test_obj.onChanged()
- test_obj.onChanged()
- assert test_obj.called == ["Once test #1/1", "Once test #2"]
diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py
deleted file mode 100644
index 3fabc271..00000000
--- a/src/Test/TestFileRequest.py
+++ /dev/null
@@ -1,124 +0,0 @@
-import io
-
-import pytest
-import time
-
-from Connection import ConnectionServer
-from Connection import Connection
-from File import FileServer
-
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestFileRequest:
- def testGetFile(self, file_server, site):
- file_server.ip_incoming = {} # Reset flood protection
- client = ConnectionServer(file_server.ip, 1545)
-
- connection = client.getConnection(file_server.ip, 1544)
- file_server.sites[site.address] = site
-
- # Normal request
- response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
- assert b"sign" in response["body"]
-
- response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
- assert b"sign" in response["body"]
-
- # Invalid file
- response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
- assert "File read error" in response["error"]
-
- # Location over size
- response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
- assert "File read error" in response["error"]
-
- # Stream from parent dir
- response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
- assert "File read exception" in response["error"]
-
- # Invalid site
- response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
- assert "Unknown site" in response["error"]
-
- response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
- assert "Unknown site" in response["error"]
-
- # Invalid size
- response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234})
- assert "File size does not match" in response["error"]
-
- # Invalid path
- for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
- for sep in ["/", "\\"]:
- response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0})
- assert response["error"] == 'File read exception'
-
- connection.close()
- client.stop()
-
- def testStreamFile(self, file_server, site):
- file_server.ip_incoming = {} # Reset flood protection
- client = ConnectionServer(file_server.ip, 1545)
- connection = client.getConnection(file_server.ip, 1544)
- file_server.sites[site.address] = site
-
- buff = io.BytesIO()
- response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
- assert "stream_bytes" in response
- assert b"sign" in buff.getvalue()
-
- # Invalid file
- buff = io.BytesIO()
- response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
- assert "File read error" in response["error"]
-
- # Location over size
- buff = io.BytesIO()
- response = connection.request(
- "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
- )
- assert "File read error" in response["error"]
-
- # Stream from parent dir
- buff = io.BytesIO()
- response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
- assert "File read exception" in response["error"]
-
- connection.close()
- client.stop()
-
- def testPex(self, file_server, site, site_temp):
- file_server.sites[site.address] = site
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
- connection = client.getConnection(file_server.ip, 1544)
-
- # Add new fake peer to site
- fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
- # Add fake connection to it
- fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
- fake_peer.connection.last_recv_time = time.time()
- assert fake_peer in site.getConnectablePeers()
-
- # Add file_server as peer to client
- peer_file_server = site_temp.addPeer(file_server.ip, 1544)
-
- assert "%s:11337" % file_server.ip_external not in site_temp.peers
- assert peer_file_server.pex()
- assert "%s:11337" % file_server.ip_external in site_temp.peers
-
- # Should not exchange private peers from local network
- fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
- assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
- fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
- fake_peer_private.connection.last_recv_time = time.time()
-
- assert "192.168.0.1:11337" not in site_temp.peers
- assert not peer_file_server.pex()
- assert "192.168.0.1:11337" not in site_temp.peers
-
-
- connection.close()
- client.stop()
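(Annotation: illustrative shape of the wire exchange these tests drive, as msgpack-encoded request/response dicts; the field values below are placeholders, not captured traffic.)

# Request sent by connection.request("getFile", ...):
request = {
    "cmd": "getFile",
    "req_id": 1,
    "params": {"site": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
               "inner_path": "content.json", "location": 0},
}
# Success response: {"cmd": "response", "to": 1, "body": b"...", "location": ..., "size": ...}
# Error response:   {"cmd": "response", "to": 1, "error": "File read error"}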
diff --git a/src/Test/TestFlag.py b/src/Test/TestFlag.py
deleted file mode 100644
index 12fd8165..00000000
--- a/src/Test/TestFlag.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import os
-
-import pytest
-
-from util.Flag import Flag
-
-class TestFlag:
- def testFlagging(self):
- flag = Flag()
- @flag.admin
- @flag.no_multiuser
- def testFn(anything):
- return anything
-
- assert "admin" in flag.db["testFn"]
- assert "no_multiuser" in flag.db["testFn"]
-
- def testSubclassedFlagging(self):
- flag = Flag()
- class Test:
- @flag.admin
- @flag.no_multiuser
- def testFn(anything):
- return anything
-
- class SubTest(Test):
- pass
-
- assert "admin" in flag.db["testFn"]
- assert "no_multiuser" in flag.db["testFn"]
-
- def testInvalidFlag(self):
- flag = Flag()
- with pytest.raises(Exception) as err:
- @flag.no_multiuser
- @flag.unknown_flag
- def testFn(anything):
- return anything
- assert "Invalid flag" in str(err.value)
diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py
deleted file mode 100644
index 07644ec0..00000000
--- a/src/Test/TestHelper.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import socket
-import struct
-import os
-
-import pytest
-from util import helper
-from Config import config
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestHelper:
- def testShellquote(self):
- assert helper.shellquote("hel'lo") == "\"hel'lo\"" # Allow '
- assert helper.shellquote('hel"lo') == '"hello"' # Remove "
- assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"')
-
- def testPackAddress(self):
- for port in [1, 1000, 65535]:
- for ip in ["1.1.1.1", "127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]:
- assert len(helper.packAddress(ip, port)) == 6
- assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port)
-
- for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]:
- assert len(helper.packAddress(ip, port)) == 18
- assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port)
-
- assert len(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == 12
- assert helper.unpackOnionAddress(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == ("boot3rdez4rzn36x.onion", port)
-
- with pytest.raises(struct.error):
- helper.packAddress("1.1.1.1", 100000)
-
- with pytest.raises(socket.error):
- helper.packAddress("999.1.1.1", 1)
-
- with pytest.raises(Exception):
- helper.unpackAddress("X")
-
- def testGetDirname(self):
- assert helper.getDirname("data/users/content.json") == "data/users/"
- assert helper.getDirname("data/users") == "data/"
- assert helper.getDirname("") == ""
- assert helper.getDirname("content.json") == ""
- assert helper.getDirname("data/users/") == "data/users/"
- assert helper.getDirname("/data/users/content.json") == "data/users/"
-
- def testGetFilename(self):
- assert helper.getFilename("data/users/content.json") == "content.json"
- assert helper.getFilename("data/users") == "users"
- assert helper.getFilename("") == ""
- assert helper.getFilename("content.json") == "content.json"
- assert helper.getFilename("data/users/") == ""
- assert helper.getFilename("/data/users/content.json") == "content.json"
-
- def testIsIp(self):
- assert helper.isIp("1.2.3.4")
- assert helper.isIp("255.255.255.255")
- assert not helper.isIp("any.host")
- assert not helper.isIp("1.2.3.4.com")
- assert not helper.isIp("1.2.3.4.any.host")
-
- def testIsPrivateIp(self):
- assert helper.isPrivateIp("192.168.1.1")
- assert not helper.isPrivateIp("1.1.1.1")
- assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c")
- assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns
-
- def testOpenLocked(self):
- locked_f = helper.openLocked(config.data_dir + "/locked.file")
- assert locked_f
- with pytest.raises(BlockingIOError):
- locked_f_again = helper.openLocked(config.data_dir + "/locked.file")
- locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file")
-
- locked_f.close()
- locked_f_different.close()
-
- os.unlink(locked_f.name)
- os.unlink(locked_f_different.name)
diff --git a/src/Test/TestMsgpack.py b/src/Test/TestMsgpack.py
deleted file mode 100644
index 5a0b6d4d..00000000
--- a/src/Test/TestMsgpack.py
+++ /dev/null
@@ -1,88 +0,0 @@
-import io
-import os
-
-import msgpack
-import pytest
-
-from Config import config
-from util import Msgpack
-from collections import OrderedDict
-
-
-class TestMsgpack:
- test_data = OrderedDict(
- sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items())
- )
-
- def testPacking(self):
- assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
- assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
-
- def testUnpacking(self):
- assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data
-
- @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker])
- def testUnpacker(self, unpacker_class):
- unpacker = unpacker_class(raw=False)
-
- data = msgpack.packb(self.test_data, use_bin_type=True)
- data += msgpack.packb(self.test_data, use_bin_type=True)
-
- messages = []
- for char in data:
- unpacker.feed(bytes([char]))
- for message in unpacker:
- messages.append(message)
-
- assert len(messages) == 2
- assert messages[0] == self.test_data
- assert messages[0] == messages[1]
-
- def testStreaming(self):
- bin_data = os.urandom(20)
- f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb")
- f.read_bytes = 30
-
- data = {"cmd": "response", "body": f, "bin": bin_data}
-
- out_buff = io.BytesIO()
- Msgpack.stream(data, out_buff.write)
- out_buff.seek(0)
-
- data_packb = {
- "cmd": "response",
- "body": open("%s/users.json" % config.data_dir, "rb").read(30),
- "bin": bin_data
- }
-
- out_buff.seek(0)
- data_unpacked = Msgpack.unpack(out_buff.read())
- assert data_unpacked == data_packb
- assert data_unpacked["cmd"] == "response"
- assert type(data_unpacked["body"]) == bytes
-
- def testBackwardCompatibility(self):
- packed = {}
- packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False)
- packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True)
- for key, val in packed.items():
- unpacked = Msgpack.unpack(val)
- type(unpacked["utf8"]) == str
- type(unpacked["bin"]) == bytes
-
- # Packed with use_bin_type=False (pre-ZeroNet 0.7.0)
- unpacked = Msgpack.unpack(packed["py3"], decode=True)
- type(unpacked["utf8"]) == str
- type(unpacked["bin"]) == bytes
- assert len(unpacked["utf8"]) == 9
- assert len(unpacked["bin"]) == 10
- with pytest.raises(UnicodeDecodeError) as err: # Try to decode binary as utf-8
- unpacked = Msgpack.unpack(packed["py3"], decode=False)
-
- # Packed with use_bin_type=True
- unpacked = Msgpack.unpack(packed["py3_bin"], decode=False)
- type(unpacked["utf8"]) == str
- type(unpacked["bin"]) == bytes
- assert len(unpacked["utf8"]) == 9
- assert len(unpacked["bin"]) == 10
-
diff --git a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py
deleted file mode 100644
index 6fc4f57d..00000000
--- a/src/Test/TestNoparallel.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import time
-
-import gevent
-import pytest
-
-import util
-from util import ThreadPool
-
-
-@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn'])
-def queue_spawn(request):
- thread_pool = ThreadPool.ThreadPool(10)
- if request.param == "gevent.spawn":
- return gevent.spawn
- else:
- return thread_pool.spawn
-
-
-class ExampleClass(object):
- def __init__(self):
- self.counted = 0
-
- @util.Noparallel()
- def countBlocking(self, num=5):
- for i in range(1, num + 1):
- time.sleep(0.1)
- self.counted += 1
- return "counted:%s" % i
-
- @util.Noparallel(queue=True, ignore_class=True)
- def countQueue(self, num=5):
- for i in range(1, num + 1):
- time.sleep(0.1)
- self.counted += 1
- return "counted:%s" % i
-
- @util.Noparallel(blocking=False)
- def countNoblocking(self, num=5):
- for i in range(1, num + 1):
- time.sleep(0.01)
- self.counted += 1
- return "counted:%s" % i
-
-
-class TestNoparallel:
- def testBlocking(self, queue_spawn):
- obj1 = ExampleClass()
- obj2 = ExampleClass()
-
- # Don't allow calling it again while it's already running; concurrent calls block until it finishes
- threads = [
- queue_spawn(obj1.countBlocking),
- queue_spawn(obj1.countBlocking),
- queue_spawn(obj1.countBlocking),
- queue_spawn(obj2.countBlocking)
- ]
- assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking already counting, but block until its finishes
- gevent.joinall(threads)
- assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"]
- obj2.countBlocking() # Allowed to call again as obj2.countBlocking has finished
-
- assert obj1.counted == 5
- assert obj2.counted == 10
-
- def testNoblocking(self):
- obj1 = ExampleClass()
-
- thread1 = obj1.countNoblocking()
- thread2 = obj1.countNoblocking() # Ignored
-
- assert obj1.counted == 0
- time.sleep(0.1)
- assert thread1.value == "counted:5"
- assert thread2.value == "counted:5"
- assert obj1.counted == 5
-
- obj1.countNoblocking().join() # Allow again and wait until finishes
- assert obj1.counted == 10
-
- def testQueue(self, queue_spawn):
- obj1 = ExampleClass()
-
- queue_spawn(obj1.countQueue, num=1)
- queue_spawn(obj1.countQueue, num=1)
- queue_spawn(obj1.countQueue, num=1)
-
- time.sleep(0.3)
- assert obj1.counted == 2 # No multi-queue supported
-
- obj2 = ExampleClass()
- queue_spawn(obj2.countQueue, num=10)
- queue_spawn(obj2.countQueue, num=10)
-
- time.sleep(1.5) # Call 1 finished, call 2 still working
- assert 10 < obj2.counted < 20
-
- queue_spawn(obj2.countQueue, num=10)
- time.sleep(2.0)
-
- assert obj2.counted == 30
-
- def testQueueOverload(self):
- obj1 = ExampleClass()
-
- threads = []
- for i in range(1000):
- thread = gevent.spawn(obj1.countQueue, num=5)
- threads.append(thread)
-
- gevent.joinall(threads)
- assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed)
-
- def testIgnoreClass(self, queue_spawn):
- obj1 = ExampleClass()
- obj2 = ExampleClass()
-
- threads = [
- queue_spawn(obj1.countQueue),
- queue_spawn(obj1.countQueue),
- queue_spawn(obj1.countQueue),
- queue_spawn(obj2.countQueue),
- queue_spawn(obj2.countQueue)
- ]
- s = time.time()
- time.sleep(0.001)
- gevent.joinall(threads)
-
- # Queue is limited to 2 calls (each call counts to 5 and takes 0.5 sec)
- assert obj1.counted + obj2.counted == 10
-
- taken = time.time() - s
- assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s
-
- def testException(self, queue_spawn):
- class MyException(Exception):
- pass
-
- @util.Noparallel()
- def raiseException():
- raise MyException("Test error!")
-
- with pytest.raises(MyException) as err:
- raiseException()
- assert str(err.value) == "Test error!"
-
- with pytest.raises(MyException) as err:
- queue_spawn(raiseException).get()
- assert str(err.value) == "Test error!"
-
- def testMultithreadMix(self, queue_spawn):
- obj1 = ExampleClass()
- with ThreadPool.ThreadPool(10) as thread_pool:
- s = time.time()
- t1 = queue_spawn(obj1.countBlocking, 5)
- time.sleep(0.01)
- t2 = thread_pool.spawn(obj1.countBlocking, 5)
- time.sleep(0.01)
- t3 = thread_pool.spawn(obj1.countBlocking, 5)
- time.sleep(0.3)
- t4 = gevent.spawn(obj1.countBlocking, 5)
- threads = [t1, t2, t3, t4]
- for thread in threads:
- assert thread.get() == "counted:5"
-
- time_taken = time.time() - s
- assert obj1.counted == 5
- assert 0.5 < time_taken < 0.7
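(Annotation: a minimal, gevent-only sketch of the Noparallel idea these tests exercise: one running call per function/instance, with concurrent callers sharing the first call's result. The queue=True re-run behavior and the thread-pool integration are deliberately omitted; this is an assumption-level illustration, not the real decorator.)

import gevent

class Noparallel:
    def __init__(self, blocking=True, ignore_class=False):
        self.blocking = blocking
        self.ignore_class = ignore_class
        self.running = {}  # (func, instance id) -> running greenlet

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            key = func if (self.ignore_class or not args) else (func, id(args[0]))
            thread = self.running.get(key)
            if thread is None:  # Not running yet: start it
                thread = gevent.spawn(func, *args, **kwargs)
                self.running[key] = thread
                thread.link(lambda t: self.running.pop(key, None))
            # Blocking mode waits for (and shares) the result; otherwise the
            # caller gets the greenlet itself, as testNoblocking expects
            return thread.get() if self.blocking else thread
        return wrapper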
diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py
deleted file mode 100644
index f57e046e..00000000
--- a/src/Test/TestPeer.py
+++ /dev/null
@@ -1,159 +0,0 @@
-import time
-import io
-
-import pytest
-
-from File import FileServer
-from File import FileRequest
-from Crypt import CryptHash
-from . import Spy
-
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestPeer:
- def testPing(self, file_server, site, site_temp):
- file_server.sites[site.address] = site
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
- connection = client.getConnection(file_server.ip, 1544)
-
- # Add file_server as peer to client
- peer_file_server = site_temp.addPeer(file_server.ip, 1544)
-
- assert peer_file_server.ping() is not None
-
- assert peer_file_server in site_temp.peers.values()
- peer_file_server.remove()
- assert peer_file_server not in site_temp.peers.values()
-
- connection.close()
- client.stop()
-
- def testDownloadFile(self, file_server, site, site_temp):
- file_server.sites[site.address] = site
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
- connection = client.getConnection(file_server.ip, 1544)
-
- # Add file_server as peer to client
- peer_file_server = site_temp.addPeer(file_server.ip, 1544)
-
- # Testing streamFile
- buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
- assert b"sign" in buff.getvalue()
-
- # Testing getFile
- buff = peer_file_server.getFile(site_temp.address, "content.json")
- assert b"sign" in buff.getvalue()
-
- connection.close()
- client.stop()
-
- def testHashfield(self, site):
- sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]
-
- site.storage.verifyFiles(quick_check=True) # Find what optional files we have
-
- # Check if hashfield has any files
- assert site.content_manager.hashfield
- assert len(site.content_manager.hashfield) > 0
-
- # Check existing hash
- assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield
-
- # Add new hash
- new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
- assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
- assert site.content_manager.hashfield.appendHash(new_hash)
- assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time
- assert site.content_manager.hashfield.getHashId(new_hash) in site.content_manager.hashfield
-
- # Remove new hash
- assert site.content_manager.hashfield.removeHash(new_hash)
- assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
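(Annotation: getHashId plausibly compresses a full sha512 digest into a 2-byte id so the hashfield stays tiny; a sketch under that assumption, e.g. taking the first four hex digits. This also matches the literal "AABB" ids appended in the next test.)

import array

class HashField(array.array):
    def __new__(cls):
        return super().__new__(cls, "H")  # Unsigned 16-bit hash ids

    def getHashId(self, hash_hex):
        return int(hash_hex[0:4], 16)     # First 4 hex digits -> 0..65535

    def appendHash(self, hash_hex):
        hash_id = self.getHashId(hash_hex)
        if hash_id in self:
            return False                  # Already stored: don't add twice
        self.append(hash_id)
        return True

    def removeHash(self, hash_hex):
        hash_id = self.getHashId(hash_hex)
        if hash_id not in self:
            return False
        self.remove(hash_id)
        return True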
-
- def testHashfieldExchange(self, file_server, site, site_temp):
- server1 = file_server
- server1.sites[site.address] = site
- site.connection_server = server1
-
- server2 = FileServer(file_server.ip, 1545)
- server2.sites[site_temp.address] = site_temp
- site_temp.connection_server = server2
- site.storage.verifyFiles(quick_check=True) # Find what optional files we have
-
- # Add file_server as peer to client
- server2_peer1 = site_temp.addPeer(file_server.ip, 1544)
-
- # Check if hashfield has any files
- assert len(site.content_manager.hashfield) > 0
-
- # Testing hashfield sync
- assert len(server2_peer1.hashfield) == 0
- assert server2_peer1.updateHashfield() # Query hashfield from peer
- assert len(server2_peer1.hashfield) > 0
-
- # Test force push new hashfield
- site_temp.content_manager.hashfield.appendHash("AABB")
- server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
- with Spy.Spy(FileRequest, "route") as requests:
- assert len(server1_peer2.hashfield) == 0
- server2_peer1.sendMyHashfield()
- assert len(server1_peer2.hashfield) == 1
- server2_peer1.sendMyHashfield() # Hashfield not changed, should be ignored
-
- assert len(requests) == 1
-
- time.sleep(0.01) # To make hashfield change date different
-
- site_temp.content_manager.hashfield.appendHash("AACC")
- server2_peer1.sendMyHashfield() # Push hashfield
-
- assert len(server1_peer2.hashfield) == 2
- assert len(requests) == 2
-
- site_temp.content_manager.hashfield.appendHash("AADD")
-
- assert server1_peer2.updateHashfield(force=True) # Request hashfield
- assert len(server1_peer2.hashfield) == 3
- assert len(requests) == 3
-
- assert not server2_peer1.sendMyHashfield() # Not changed, should be ignored
- assert len(requests) == 3
-
- server2.stop()
-
- def testFindHash(self, file_server, site, site_temp):
- file_server.sites[site.address] = site
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Add file_server as peer to client
- peer_file_server = site_temp.addPeer(file_server.ip, 1544)
-
- assert peer_file_server.findHashIds([1234]) == {}
-
- # Add fake peers with the required hashes
- fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
- fake_peer_1.hashfield.append(1234)
- fake_peer_2 = site.addPeer("1.2.3.5", 1545)
- fake_peer_2.hashfield.append(1234)
- fake_peer_2.hashfield.append(1235)
- fake_peer_3 = site.addPeer("1.2.3.6", 1546)
- fake_peer_3.hashfield.append(1235)
- fake_peer_3.hashfield.append(1236)
-
- res = peer_file_server.findHashIds([1234, 1235])
- assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)])
- assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
-
- # Test my address adding
- site.content_manager.hashfield.append(1234)
-
- res = peer_file_server.findHashIds([1234, 1235])
- assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)])
- assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
diff --git a/src/Test/TestRateLimit.py b/src/Test/TestRateLimit.py
deleted file mode 100644
index fafa5f1a..00000000
--- a/src/Test/TestRateLimit.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import time
-
-import gevent
-
-from util import RateLimit
-
-
-# Time is around limit +/- 0.05 sec
-def around(t, limit):
- return t >= limit - 0.05 and t <= limit + 0.05
-
-
-class ExampleClass(object):
- def __init__(self):
- self.counted = 0
- self.last_called = None
-
- def count(self, back="counted"):
- self.counted += 1
- self.last_called = back
- return back
-
-
-class TestRateLimit:
- def testCall(self):
- obj1 = ExampleClass()
- obj2 = ExampleClass()
-
- s = time.time()
- assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
- assert around(time.time() - s, 0.0) # First call is allowed instantly
- assert obj1.counted == 1
-
- # Call again
- assert not RateLimit.isAllowed("counting", 0.1)
- assert RateLimit.isAllowed("something else", 0.1)
- assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
- assert around(time.time() - s, 0.1) # Delays second call within interval
- assert obj1.counted == 2
- time.sleep(0.1) # Wait for the cooldown
-
- # Call 3 times async
- s = time.time()
- assert obj2.counted == 0
- threads = [
- gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # Instant
- gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # 0.1s delay
- gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)) # 0.2s delay
- ]
- gevent.joinall(threads)
- assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
- assert around(time.time() - s, 0.2)
-
- # Wait 0.1s cooldown
- assert not RateLimit.isAllowed("counting", 0.1)
- time.sleep(0.11)
- assert RateLimit.isAllowed("counting", 0.1)
-
- # No queue = instant again
- s = time.time()
- assert RateLimit.isAllowed("counting", 0.1)
- assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
- assert around(time.time() - s, 0.0)
-
- assert obj2.counted == 4
-
- def testCallAsync(self):
- obj1 = ExampleClass()
- obj2 = ExampleClass()
-
- s = time.time()
- RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
- assert obj1.counted == 1 # First instant
- assert around(time.time() - s, 0.0)
-
- # After that, calls are delayed
- s = time.time()
- t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2") # Dumped by the next call
- time.sleep(0.03)
- t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3") # Dumped by the next call
- time.sleep(0.03)
- t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4") # Will be called
- assert obj1.counted == 1 # Delay still in progress: Not called yet
- t3.join()
- assert t3.value == "call #4"
- assert around(time.time() - s, 0.1)
-
- # Only the last one called
- assert obj1.counted == 2
- assert obj1.last_called == "call #4"
-
- # Just called, not allowed again
- assert not RateLimit.isAllowed("counting async", 0.1)
- s = time.time()
- t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
- assert obj1.counted == 3
- assert around(time.time() - s, 0.1)
- assert not RateLimit.isAllowed("counting async", 0.1)
- time.sleep(0.11)
- assert RateLimit.isAllowed("counting async", 0.1)
diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py
deleted file mode 100644
index 429bde50..00000000
--- a/src/Test/TestSafeRe.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from util import SafeRe
-
-import pytest
-
-
-class TestSafeRe:
- def testSafeMatch(self):
- assert SafeRe.match(
- "((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)",
- "js/ZeroTalk.coffee"
- )
- assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json")
-
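- # These patterns can backtrack catastrophically (ReDoS), so SafeRe must refuse to evaluate them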
- @pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"])
- def testUnsafeMatch(self, pattern):
- with pytest.raises(SafeRe.UnsafePatternError) as err:
- SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
- assert "Potentially unsafe" in str(err.value)
-
- @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"])
- def testUnsafeRepetition(self, pattern):
- with pytest.raises(SafeRe.UnsafePatternError) as err:
- SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
- assert "More than" in str(err.value)
diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py
deleted file mode 100644
index 05bb2ed9..00000000
--- a/src/Test/TestSite.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import shutil
-import os
-
-import pytest
-from Site import SiteManager
-
-TEST_DATA_PATH = "src/Test/testdata"
-
-@pytest.mark.usefixtures("resetSettings")
-class TestSite:
- def testClone(self, site):
- assert site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
-
- # Remove old files
- if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"):
- shutil.rmtree(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
- assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json")
-
- # Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL
- new_site = site.clone(
- "159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL", "5JU2p5h3R7B1WrbaEdEDNZR7YHqRLGcjNcqwqVQzX2H4SuNe2ee", address_index=1
- )
-
- # Check if clone was successful
- assert new_site.address == "159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"
- assert new_site.storage.isFile("content.json")
- assert new_site.storage.isFile("index.html")
- assert new_site.storage.isFile("data/users/content.json")
- assert new_site.storage.isFile("data/zeroblog.db")
- assert new_site.storage.verifyFiles()["bad_files"] == [] # No bad files allowed
- assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog"
-
- # Optional files should be removed
-
- assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0
-
- # Test re-cloning (updating)
-
- # Changes in non-data files should be overwritten
- new_site.storage.write("index.html", b"this will be overwritten")
- assert new_site.storage.read("index.html") == b"this will be overwritten"
-
- # Changes in data file should be kept after re-cloning
- changed_contentjson = new_site.storage.loadJson("content.json")
- changed_contentjson["description"] = "Update Description Test"
- new_site.storage.writeJson("content.json", changed_contentjson)
-
- changed_data = new_site.storage.loadJson("data/data.json")
- changed_data["title"] = "UpdateTest"
- new_site.storage.writeJson("data/data.json", changed_data)
-
- # The update should be reflected in the database
- assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "UpdateTest"
-
- # Re-clone the site
- site.log.debug("Re-cloning")
- site.clone("159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
-
- assert new_site.storage.loadJson("data/data.json")["title"] == "UpdateTest"
- assert new_site.storage.loadJson("content.json")["description"] == "Update Description Test"
- assert new_site.storage.read("index.html") != "this will be overwritten"
-
- # Delete created files
- new_site.storage.deleteFiles()
- assert not os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
-
- # Delete from site registry
- assert new_site.address in SiteManager.site_manager.sites
- SiteManager.site_manager.delete(new_site.address)
- assert new_site.address not in SiteManager.site_manager.sites
diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py
deleted file mode 100644
index cd0a4c9f..00000000
--- a/src/Test/TestSiteDownload.py
+++ /dev/null
@@ -1,562 +0,0 @@
-import time
-
-import pytest
-import mock
-import gevent
-import gevent.event
-import os
-
-from Connection import ConnectionServer
-from Config import config
-from File import FileRequest
-from File import FileServer
-from Site.Site import Site
-from . import Spy
-
-
-@pytest.mark.usefixtures("resetTempSettings")
-@pytest.mark.usefixtures("resetSettings")
-class TestSiteDownload:
- def testRename(self, file_server, site, site_temp):
- assert site.storage.directory == config.data_dir + "/" + site.address
- assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
- site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
-
-
- site_temp.addPeer(file_server.ip, 1544)
-
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
- assert site_temp.storage.isFile("content.json")
-
- # Rename non-optional file
- os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))
-
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
- content = site.storage.loadJson("content.json")
- assert "data/img/domain-new.png" in content["files"]
- assert "data/img/domain.png" not in content["files"]
- assert not site_temp.storage.isFile("data/img/domain-new.png")
- assert site_temp.storage.isFile("data/img/domain.png")
- settings_before = site_temp.settings
-
- with Spy.Spy(FileRequest, "route") as requests:
- site.publish()
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
- assert "streamFile" not in [req[1] for req in requests]
-
- content = site_temp.storage.loadJson("content.json")
- assert "data/img/domain-new.png" in content["files"]
- assert "data/img/domain.png" not in content["files"]
- assert site_temp.storage.isFile("data/img/domain-new.png")
- assert not site_temp.storage.isFile("data/img/domain.png")
-
- assert site_temp.settings["size"] == settings_before["size"]
- assert site_temp.settings["size_optional"] == settings_before["size_optional"]
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
-
- def testRenameOptional(self, file_server, site, site_temp):
- assert site.storage.directory == config.data_dir + "/" + site.address
- assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
- site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
-
-
- site_temp.addPeer(file_server.ip, 1544)
-
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
- assert site_temp.settings["optional_downloaded"] == 0
-
- site_temp.needFile("data/optional.txt")
-
- assert site_temp.settings["optional_downloaded"] > 0
- settings_before = site_temp.settings
- hashfield_before = site_temp.content_manager.hashfield.tobytes()
-
- # Rename optional file
- os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))
-
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)
-
- content = site.storage.loadJson("content.json")
- assert "data/optional-new.txt" in content["files_optional"]
- assert "data/optional.txt" not in content["files_optional"]
- assert not site_temp.storage.isFile("data/optional-new.txt")
- assert site_temp.storage.isFile("data/optional.txt")
-
- with Spy.Spy(FileRequest, "route") as requests:
- site.publish()
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
- assert "streamFile" not in [req[1] for req in requests]
-
- content = site_temp.storage.loadJson("content.json")
- assert "data/optional-new.txt" in content["files_optional"]
- assert "data/optional.txt" not in content["files_optional"]
- assert site_temp.storage.isFile("data/optional-new.txt")
- assert not site_temp.storage.isFile("data/optional.txt")
-
- assert site_temp.settings["size"] == settings_before["size"]
- assert site_temp.settings["size_optional"] == settings_before["size_optional"]
- assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
- assert site_temp.content_manager.hashfield.tobytes() == hashfield_before
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
-
-
- def testArchivedDownload(self, file_server, site, site_temp):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Download normally
- site_temp.addPeer(file_server.ip, 1544)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
-
- assert not bad_files
- assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
- assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
- assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
-
- # Add archived data
- assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
- assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
-
- site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
- site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
- date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
- assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
- assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
- assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
-
- # Push archived update
- assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
- site.publish()
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
-
- # The archived content should disappear from remote client
- assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
- assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
- assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
- assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
- assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
-
- def testArchivedBeforeDownload(self, file_server, site, site_temp):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Download normally
- site_temp.addPeer(file_server.ip, 1544)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
-
- assert not bad_files
- assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
- assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
- assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
-
- # Add archived data
- assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"]
- assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
-
- content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
- site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
- site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
- date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
- assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
- assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
- assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
-
- # Push archived update
- assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
- site.publish()
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
-
- # The archived content should disappear from remote client
- assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
- assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
- assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
- assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
- assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
-
-
- # Test when the connected peer has the optional file
- def testOptionalDownload(self, file_server, site, site_temp):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = ConnectionServer(file_server.ip, 1545)
- site_temp.connection_server = client
- site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
-
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
- # Download optional data/optional.txt
- site.storage.verifyFiles(quick_check=True) # Find what optional files we have
- optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
- assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
- assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
-
- assert not site_temp.storage.isFile("data/optional.txt")
- assert site.storage.isFile("data/optional.txt")
- site_temp.needFile("data/optional.txt")
- assert site_temp.storage.isFile("data/optional.txt")
-
- # Optional user file
- assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- optional_file_info = site_temp.content_manager.getFileInfo(
- "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
- )
- assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
- assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
-
- site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
-
- # Test when the connected peer does not have the file, so ask it if it knows someone who has it
- def testFindOptional(self, file_server, site, site_temp):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init full source server (has optional files)
- site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
- file_server_full = FileServer(file_server.ip, 1546)
- site_full.connection_server = file_server_full
-
- def listen():
- ConnectionServer.start(file_server_full)
- ConnectionServer.listen(file_server_full)
-
- gevent.spawn(listen)
- time.sleep(0.001) # Port opening
- file_server_full.sites[site_full.address] = site_full # Add site
- site_full.storage.verifyFiles(quick_check=True) # Check optional files
- site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server
- hashfield = site_full_peer.updateHashfield() # Update hashfield
- assert len(site_full.content_manager.hashfield) == 8
- assert hashfield
- assert site_full.storage.isFile("data/optional.txt")
- assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- assert len(site_full_peer.hashfield) == 8
-
- # Remove hashes from source server
- for hash in list(site.content_manager.hashfield):
- site.content_manager.hashfield.remove(hash)
-
- # Init client server
- site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
- site_temp.addPeer(file_server.ip, 1544) # Add source server
-
- # Download normal files
- site_temp.log.info("Start Downloading site")
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
- # Download optional data/optional.txt
- optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
- optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- assert not site_temp.storage.isFile("data/optional.txt")
- assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
- assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server doesn't know it has the file
- assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source server doesn't know it has the file
- assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # The full peer on the source server has the file
- assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"]) # The full peer on the source server has the file
- assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # The full source server knows it has the file
- assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # The full source server knows it has the file
-
- site_temp.log.info("Request optional files")
- with Spy.Spy(FileRequest, "route") as requests:
- # Request 2 files at the same time
- threads = []
- threads.append(site_temp.needFile("data/optional.txt", blocking=False))
- threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
- gevent.joinall(threads)
-
- assert len([request for request in requests if request[1] == "findHashIds"]) == 1 # findHashIds should be called only once
-
- assert site_temp.storage.isFile("data/optional.txt")
- assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
-
- assert site_temp.storage.deleteFiles()
- file_server_full.stop()
- [connection.close() for connection in file_server.connections]
- site_full.content_manager.contents.db.close("FindOptional test end")
-
- def testUpdate(self, file_server, site, site_temp):
- assert site.storage.directory == config.data_dir + "/" + site.address
- assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Don't try to find peers from the net
- site.announce = mock.MagicMock(return_value=True)
- site_temp.announce = mock.MagicMock(return_value=True)
-
- # Connect peers
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site from site to site_temp
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- assert len(site_temp.bad_files) == 1
-
- # Update file
- data_original = site.storage.open("data/data.json").read()
- data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
- assert data_original != data_new
-
- site.storage.open("data/data.json", "wb").write(data_new)
-
- assert site.storage.open("data/data.json").read() == data_new
- assert site_temp.storage.open("data/data.json").read() == data_original
-
- site.log.info("Publish new data.json without patch")
- # Publish without patch
- with Spy.Spy(FileRequest, "route") as requests:
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
- site.publish()
- time.sleep(0.1)
- site.log.info("Downloading site")
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
-
- assert site_temp.storage.open("data/data.json").read() == data_new
-
- # Close connection to avoid update spam limit
- list(site.peers.values())[0].remove()
- site.addPeer(file_server.ip, 1545)
- list(site_temp.peers.values())[0].ping() # Connect back
- time.sleep(0.1)
-
- # Update with patch
- data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
- assert data_original != data_new
-
- site.storage.open("data/data.json-new", "wb").write(data_new)
-
- assert site.storage.open("data/data.json-new").read() == data_new
- assert site_temp.storage.open("data/data.json").read() != data_new
-
- # Generate diff
- diffs = site.content_manager.getDiffs("content.json")
- assert not site.storage.isFile("data/data.json-new") # New data file removed
- assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
- assert "data/data.json" in diffs
- assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
-
- # Publish with patch
- site.log.info("Publish new data.json with patch")
- with Spy.Spy(FileRequest, "route") as requests:
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
- event_done = gevent.event.AsyncResult()
- site.publish(diffs=diffs)
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
-
- assert site_temp.storage.open("data/data.json").read() == data_new
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
-
- def testBigUpdate(self, file_server, site, site_temp):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Connect peers
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site from site to site_temp
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
-
- # Update file
- data_original = site.storage.open("data/data.json").read()
- data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
- assert data_original != data_new
-
- site.storage.open("data/data.json-new", "wb").write(data_new)
-
- assert site.storage.open("data/data.json-new").read() == data_new
- assert site_temp.storage.open("data/data.json").read() != data_new
-
- # Generate diff
- diffs = site.content_manager.getDiffs("content.json")
- assert not site.storage.isFile("data/data.json-new") # New data file removed
- assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
- assert "data/data.json" in diffs
-
- content_json = site.storage.loadJson("content.json")
- content_json["description"] = "BigZeroBlog" * 1024 * 10
- site.storage.writeJson("content.json", content_json)
- site.content_manager.loadContent("content.json", force=True)
-
- # Publish with patch
- site.log.info("Publish new data.json with patch")
- with Spy.Spy(FileRequest, "route") as requests:
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
- assert site.storage.getSize("content.json") > 10 * 1024 # Make sure it's a big content.json
- site.publish(diffs=diffs)
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
- assert len(file_requests) == 1
-
- assert site_temp.storage.open("data/data.json").read() == data_new
- assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
-
- # Test what happens if the site's content.json is bigger than the site limit
- def testHugeContentSiteUpdate(self, file_server, site, site_temp):
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Connect peers
- site_temp.addPeer(file_server.ip, 1544)
-
- # Download site from site to site_temp
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
- site_temp.settings["size_limit"] = int(20 * 1024 *1024)
- site_temp.saveSettings()
-
- # Raise limit size to 20MB on site so it can be signed
- site.settings["size_limit"] = int(20 * 1024 *1024)
- site.saveSettings()
-
- content_json = site.storage.loadJson("content.json")
- content_json["description"] = "PartirUnJour" * 1024 * 1024
- site.storage.writeJson("content.json", content_json)
- changed, deleted = site.content_manager.loadContent("content.json", force=True)
-
- # Make sure we have 2 different content.json files
- assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
-
- # Generate diff
- diffs = site.content_manager.getDiffs("content.json")
-
- # Publish with patch
- site.log.info("Publish new content.json bigger than 10MB")
- with Spy.Spy(FileRequest, "route") as requests:
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
- assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # Verify it's over 10MB
- time.sleep(0.1)
- site.publish(diffs=diffs)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
- assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
- assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
-
- def testUnicodeFilename(self, file_server, site, site_temp):
- assert site.storage.directory == config.data_dir + "/" + site.address
- assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
-
- # Init source server
- site.connection_server = file_server
- file_server.sites[site.address] = site
-
- # Init client server
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
- site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
-
- site_temp.addPeer(file_server.ip, 1544)
-
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
- site.storage.write("data/img/árvíztűrő.png", b"test")
-
- site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
- content = site.storage.loadJson("content.json")
- assert "data/img/árvíztűrő.png" in content["files"]
- assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
- settings_before = site_temp.settings
-
- with Spy.Spy(FileRequest, "route") as requests:
- site.publish()
- time.sleep(0.1)
- assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
- assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
-
- content = site_temp.storage.loadJson("content.json")
- assert "data/img/árvíztűrő.png" in content["files"]
- assert site_temp.storage.isFile("data/img/árvíztűrő.png")
-
- assert site_temp.settings["size"] == settings_before["size"]
- assert site_temp.settings["size_optional"] == settings_before["size_optional"]
-
- assert site_temp.storage.deleteFiles()
- [connection.close() for connection in file_server.connections]
diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py
deleted file mode 100644
index f11262bf..00000000
--- a/src/Test/TestSiteStorage.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import pytest
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestSiteStorage:
- def testWalk(self, site):
- # Rootdir
- walk_root = list(site.storage.walk(""))
- assert "content.json" in walk_root
- assert "css/all.css" in walk_root
-
- # Subdir
- assert list(site.storage.walk("data-default")) == ["data.json", "users/content-default.json"]
-
- def testList(self, site):
- # Rootdir
- list_root = list(site.storage.list(""))
- assert "content.json" in list_root
- assert "css/all.css" not in list_root
-
- # Subdir
- assert set(site.storage.list("data-default")) == set(["data.json", "users"])
-
- def testDbRebuild(self, site):
- assert site.storage.rebuildDb()
diff --git a/src/Test/TestThreadPool.py b/src/Test/TestThreadPool.py
deleted file mode 100644
index 5e95005e..00000000
--- a/src/Test/TestThreadPool.py
+++ /dev/null
@@ -1,163 +0,0 @@
-import time
-import threading
-
-import gevent
-import pytest
-
-from util import ThreadPool
-
-
-class TestThreadPool:
- def testExecutionOrder(self):
- with ThreadPool.ThreadPool(4) as pool:
- events = []
-
- @pool.wrap
- def blocker():
- events.append("S")
- out = 0
- for i in range(10000000):
- if i == 3000000:
- events.append("M")
- out += 1
- events.append("D")
- return out
-
- threads = []
- for i in range(3):
- threads.append(gevent.spawn(blocker))
- gevent.joinall(threads)
-
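- # Running on parallel threads, all tasks start ("S") before any reaches the midpoint ("M") or finishes ("D")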
- assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3
-
- res = blocker()
- assert res == 10000000
-
- def testLockBlockingSameThread(self):
- lock = ThreadPool.Lock()
-
- s = time.time()
-
- def unlocker():
- time.sleep(1)
- lock.release()
-
- gevent.spawn(unlocker)
- lock.acquire(True)
- lock.acquire(True, timeout=2)
-
- unlock_taken = time.time() - s
-
- assert 1.0 < unlock_taken < 1.5
-
- def testLockBlockingDifferentThread(self):
- lock = ThreadPool.Lock()
-
- def locker():
- lock.acquire(True)
- time.sleep(0.5)
- lock.release()
-
- with ThreadPool.ThreadPool(10) as pool:
- threads = [
- pool.spawn(locker),
- pool.spawn(locker),
- gevent.spawn(locker),
- pool.spawn(locker)
- ]
- time.sleep(0.1)
-
- s = time.time()
-
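- # The 4 lockers hold the lock for 0.5s each in sequence, so this acquire should return after ~2s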
- lock.acquire(True, 5.0)
-
- unlock_taken = time.time() - s
-
- assert 1.8 < unlock_taken < 2.2
-
- gevent.joinall(threads)
-
- def testMainLoopCallerThreadId(self):
- main_thread_id = threading.current_thread().ident
- with ThreadPool.ThreadPool(5) as pool:
- def getThreadId(*args, **kwargs):
- return threading.current_thread().ident
-
- t = pool.spawn(getThreadId)
- assert t.get() != main_thread_id
-
- t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId))
- assert t.get() == main_thread_id
-
- def testMainLoopCallerGeventSpawn(self):
- main_thread_id = threading.current_thread().ident
- with ThreadPool.ThreadPool(5) as pool:
- def waiter():
- time.sleep(1)
- return threading.current_thread().ident
-
- def geventSpawner():
- event = ThreadPool.main_loop.call(gevent.spawn, waiter)
-
- with pytest.raises(Exception) as greenlet_err:
- event.get()
- assert str(greenlet_err.value) == "cannot switch to a different thread"
-
- waiter_thread_id = ThreadPool.main_loop.call(event.get)
- return waiter_thread_id
-
- s = time.time()
- waiter_thread_id = pool.apply(geventSpawner)
- assert main_thread_id == waiter_thread_id
- time_taken = time.time() - s
- assert 0.9 < time_taken < 1.2
-
- def testEvent(self):
- with ThreadPool.ThreadPool(5) as pool:
- event = ThreadPool.Event()
-
- def setter():
- time.sleep(1)
- event.set("done!")
-
- def getter():
- return event.get()
-
- pool.spawn(setter)
- t_gevent = gevent.spawn(getter)
- t_pool = pool.spawn(getter)
- s = time.time()
- assert event.get() == "done!"
- time_taken = time.time() - s
- gevent.joinall([t_gevent, t_pool])
-
- assert t_gevent.get() == "done!"
- assert t_pool.get() == "done!"
-
- assert 0.9 < time_taken < 1.2
-
- with pytest.raises(Exception) as err:
- event.set("another result")
-
- assert "Event already has value" in str(err.value)
-
- def testMemoryLeak(self):
- import gc
- thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))]
-
- def worker():
- time.sleep(0.1)
- return "ok"
-
- def poolTest():
- with ThreadPool.ThreadPool(5) as pool:
- for i in range(20):
- pool.spawn(worker)
-
- for i in range(5):
- poolTest()
- new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before]
- #print("New objs:", new_thread_objs, "run:", num_run)
-
- # Make sure no threadpool object left behind
- assert not new_thread_objs
diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py
deleted file mode 100644
index 0252d73a..00000000
--- a/src/Test/TestTor.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import time
-
-import pytest
-import mock
-
-from File import FileServer
-from Crypt import CryptRsa
-from Config import config
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.usefixtures("resetTempSettings")
-class TestTor:
- def testDownload(self, tor_manager):
- for retry in range(15):
- time.sleep(1)
- if tor_manager.enabled and tor_manager.conn:
- break
- assert tor_manager.enabled
-
- def testManagerConnection(self, tor_manager):
- assert "250-version" in tor_manager.request("GETINFO version")
-
- def testAddOnion(self, tor_manager):
- # Add
- address = tor_manager.addOnion()
- assert address
- assert address in tor_manager.privatekeys
-
- # Delete
- assert tor_manager.delOnion(address)
- assert address not in tor_manager.privatekeys
-
- def testSignOnion(self, tor_manager):
- address = tor_manager.addOnion()
-
- # Sign
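- # (v2 onion services use 1024-bit RSA keys: 128-byte signatures, 140-byte DER-encoded public keys)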
- sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address))
- assert len(sign) == 128
-
- # Verify
- publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address))
- assert len(publickey) == 140
- assert CryptRsa.verify(b"hello", publickey, sign)
- assert not CryptRsa.verify(b"not hello", publickey, sign)
-
- # Pub to address
- assert CryptRsa.publickeyToOnion(publickey) == address
-
- # Delete
- tor_manager.delOnion(address)
-
- @pytest.mark.slow
- def testConnection(self, tor_manager, file_server, site, site_temp):
- file_server.tor_manager.start_onions = True
- address = file_server.tor_manager.getOnion(site.address)
- assert address
- print("Connecting to", address)
- for retry in range(5): # Wait for hidden service creation
- time.sleep(10)
- try:
- connection = file_server.getConnection(address + ".onion", 1544)
- if connection:
- break
- except Exception as err:
- continue
- assert connection.handshake
- assert not connection.handshake["peer_id"] # No peer_id for Tor connections
-
- # Return the same connection without site specified
- assert file_server.getConnection(address + ".onion", 1544) == connection
- # No reuse for different site
- assert file_server.getConnection(address + ".onion", 1544, site=site) != connection
- assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site)
- site_temp.address = "1OTHERSITE"
- assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp)
-
- # Only allow queries from the locked site
- file_server.sites[site.address] = site
- connection_locked = file_server.getConnection(address + ".onion", 1544, site=site)
- assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
- assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site"
-
- def testPex(self, file_server, site, site_temp):
- # Register site to currently running fileserver
- site.connection_server = file_server
- file_server.sites[site.address] = site
- # Create a new file server to emulate new peer connecting to our peer
- file_server_temp = FileServer(file_server.ip, 1545)
- site_temp.connection_server = file_server_temp
- file_server_temp.sites[site_temp.address] = site_temp
-
- # We will request peers from this
- peer_source = site_temp.addPeer(file_server.ip, 1544)
-
- # Get ip4 peers from source site
- site.addPeer("1.2.3.4", 1555) # Add peer to source site
- assert peer_source.pex(need_num=10) == 1
- assert len(site_temp.peers) == 2
- assert "1.2.3.4:1555" in site_temp.peers
-
- # Get onion peers from source site
- site.addPeer("bka4ht2bzxchy44r.onion", 1555)
- assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers
-
- # Don't add onion peers if not supported
- assert "onion" not in file_server_temp.supported_ip_types
- assert peer_source.pex(need_num=10) == 0
-
- file_server_temp.supported_ip_types.append("onion")
- assert peer_source.pex(need_num=10) == 1
-
- assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers
-
- def testFindHash(self, tor_manager, file_server, site, site_temp):
- file_server.ip_incoming = {} # Reset flood protection
- file_server.sites[site.address] = site
- file_server.tor_manager = tor_manager
-
- client = FileServer(file_server.ip, 1545)
- client.sites = {site_temp.address: site_temp}
- site_temp.connection_server = client
-
- # Add file_server as peer to client
- peer_file_server = site_temp.addPeer(file_server.ip, 1544)
-
- assert peer_file_server.findHashIds([1234]) == {}
-
- # Add fake peers with the required hash
- fake_peer_1 = site.addPeer("bka4ht2bzxchy44r.onion", 1544)
- fake_peer_1.hashfield.append(1234)
- fake_peer_2 = site.addPeer("1.2.3.5", 1545)
- fake_peer_2.hashfield.append(1234)
- fake_peer_2.hashfield.append(1235)
- fake_peer_3 = site.addPeer("1.2.3.6", 1546)
- fake_peer_3.hashfield.append(1235)
- fake_peer_3.hashfield.append(1236)
-
- res = peer_file_server.findHashIds([1234, 1235])
-
- assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
- assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
-
- # Test adding our own address
- site.content_manager.hashfield.append(1234)
-
- res = peer_file_server.findHashIds([1234, 1235])
- assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
- assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
-
- def testSiteOnion(self, tor_manager):
- with mock.patch.object(config, "tor", "always"):
- assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2")
- assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1")
diff --git a/src/Test/TestTranslate.py b/src/Test/TestTranslate.py
deleted file mode 100644
index 348a65a6..00000000
--- a/src/Test/TestTranslate.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from Translate import Translate
-
-class TestTranslate:
- def testTranslateStrict(self):
- translate = Translate()
- data = """
- translated = _("original")
- not_translated = "original"
- """
- data_translated = translate.translateData(data, {"_(original)": "translated"})
- assert 'translated = _("translated")' in data_translated
- assert 'not_translated = "original"' in data_translated
-
- def testTranslateStrictNamed(self):
- translate = Translate()
- data = """
- translated = _("original", "original named")
- translated_other = _("original", "original other named")
- not_translated = "original"
- """
- data_translated = translate.translateData(data, {"_(original, original named)": "translated"})
- assert 'translated = _("translated")' in data_translated
- assert 'not_translated = "original"' in data_translated
-
- def testTranslateUtf8(self):
- translate = Translate()
- data = """
- greeting = "Hi again árvztűrőtökörfúrógép!"
- """
- data_translated = translate.translateData(data, {"Hi again árvztűrőtökörfúrógép!": "Üdv újra árvztűrőtökörfúrógép!"})
- assert data_translated == """
- greeting = "Üdv újra árvztűrőtökörfúrógép!"
- """
-
- def testTranslateEscape(self):
- _ = Translate()
- _["Hello"] = "Szia"
-
- # Simple escaping
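- # (format values are looked up in the caller's local variables and HTML-escaped)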
- data = "{_[Hello]} {username}!"
- username = "Hacker<script>alert('boom')</script>"
- data_translated = _(data)
- assert 'Szia' in data_translated
- assert '<' not in data_translated
- assert data_translated == "Szia Hacker<script>alert('boom')</script>!"
-
- # Escaping dicts
- user = {"username": "Hacker"}
- data = "{_[Hello]} {user[username]}!"
- data_translated = _(data)
- assert 'Szia' in data_translated
- assert '<' not in data_translated
- assert data_translated == "Szia Hacker<script>alert('boom')</script>!"
-
- # Escaping lists
- users = [{"username": "Hacker"}]
- data = "{_[Hello]} {users[0][username]}!"
- data_translated = _(data)
- assert 'Szia' in data_translated
- assert '<' not in data_translated
- assert data_translated == "Szia Hacker<script>alert('boom')</script>!"
diff --git a/src/Test/TestUiWebsocket.py b/src/Test/TestUiWebsocket.py
deleted file mode 100644
index d2d23d03..00000000
--- a/src/Test/TestUiWebsocket.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import sys
-import pytest
-
-@pytest.mark.usefixtures("resetSettings")
-class TestUiWebsocket:
- def testPermission(self, ui_websocket):
- res = ui_websocket.testAction("ping")
- assert res == "pong"
-
- res = ui_websocket.testAction("certList")
- assert "You don't have permission" in res["error"]
diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py
deleted file mode 100644
index f17c77bd..00000000
--- a/src/Test/TestUpnpPunch.py
+++ /dev/null
@@ -1,274 +0,0 @@
-import socket
-from urllib.parse import urlparse
-
-import pytest
-import mock
-
-from util import UpnpPunch as upnp
-
-
-@pytest.fixture
-def mock_socket():
- mock_socket = mock.MagicMock()
- mock_socket.recv = mock.MagicMock(return_value=b'Hello')
- mock_socket.bind = mock.MagicMock()
- mock_socket.sendto = mock.MagicMock()
-
- return mock_socket
-
-
-@pytest.fixture
-def url_obj():
- return urlparse('http://192.168.1.1/ctrlPoint.xml')
-
-
-@pytest.fixture(params=['WANPPPConnection', 'WANIPConnection'])
-def igd_profile(request):
- return """
- urn:schemas-upnp-org:service:{}:1
- urn:upnp-org:serviceId:wanpppc:pppoa
- /upnp/control/wanpppcpppoa
- /upnp/event/wanpppcpppoa
- /WANPPPConnection.xml
- """.format(request.param)
-
-
-@pytest.fixture
-def httplib_response():
- class FakeResponse(object):
- def __init__(self, status=200, body='OK'):
- self.status = status
- self.body = body
-
- def read(self):
- return self.body
- return FakeResponse
-
-
-class TestUpnpPunch(object):
- def test_perform_m_search(self, mock_socket):
- local_ip = '127.0.0.1'
-
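- # SSDP discovery: the M-SEARCH datagram is multicast to 239.255.255.250:1900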
- with mock.patch('util.UpnpPunch.socket.socket',
- return_value=mock_socket):
- result = upnp.perform_m_search(local_ip)
- assert result == 'Hello'
- assert local_ip == mock_socket.bind.call_args_list[0][0][0][0]
- assert ('239.255.255.250', 1900) == mock_socket.sendto.call_args_list[0][0][1]
-
- def test_perform_m_search_socket_error(self, mock_socket):
- mock_socket.recv.side_effect = socket.error('Timeout error')
-
- with mock.patch('util.UpnpPunch.socket.socket',
- return_value=mock_socket):
- with pytest.raises(upnp.UpnpError):
- upnp.perform_m_search('127.0.0.1')
-
- def test_retrieve_location_from_ssdp(self, url_obj):
- ctrl_location = url_obj.geturl()
- parsed_location = urlparse(ctrl_location)
- rsp = ('auth: gibberish\r\nlocation: {0}\r\n'
- 'Content-Type: text/html\r\n\r\n').format(ctrl_location)
- result = upnp._retrieve_location_from_ssdp(rsp)
- assert result == parsed_location
-
- def test_retrieve_location_from_ssdp_no_header(self):
- rsp = 'auth: gibberish\r\nContent-Type: application/json\r\n\r\n'
- with pytest.raises(upnp.IGDError):
- upnp._retrieve_location_from_ssdp(rsp)
-
- def test_retrieve_igd_profile(self, url_obj):
- with mock.patch('urllib.request.urlopen') as mock_urlopen:
- upnp._retrieve_igd_profile(url_obj)
- mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)
-
- def test_retrieve_igd_profile_timeout(self, url_obj):
- with mock.patch('urllib.request.urlopen') as mock_urlopen:
- mock_urlopen.side_effect = socket.error('Timeout error')
- with pytest.raises(upnp.IGDError):
- upnp._retrieve_igd_profile(url_obj)
-
- def test_parse_igd_profile_service_type(self, igd_profile):
- control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
- assert control_path == '/upnp/control/wanpppcpppoa'
- assert upnp_schema in ('WANPPPConnection', 'WANIPConnection',)
-
- def test_parse_igd_profile_no_ctrlurl(self, igd_profile):
- igd_profile = igd_profile.replace('controlURL', 'nope')
- with pytest.raises(upnp.IGDError):
- control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
-
- def test_parse_igd_profile_no_schema(self, igd_profile):
- igd_profile = igd_profile.replace('Connection', 'nope')
- with pytest.raises(upnp.IGDError):
- control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
-
- def test_create_open_message_parsable(self):
- from xml.parsers.expat import ExpatError
- msg, _ = upnp._create_open_message('127.0.0.1', 8888)
- try:
- upnp.parseString(msg)
- except ExpatError as e:
- pytest.fail('Incorrect XML message: {}'.format(e))
-
- def test_create_open_message_contains_right_stuff(self):
- settings = {'description': 'test desc',
- 'protocol': 'test proto',
- 'upnp_schema': 'test schema'}
- msg, fn_name = upnp._create_open_message('127.0.0.1', 8888, **settings)
- assert fn_name == 'AddPortMapping'
- assert '127.0.0.1' in msg
- assert '8888' in msg
- assert settings['description'] in msg
- assert settings['protocol'] in msg
- assert settings['upnp_schema'] in msg
-
- def test_parse_for_errors_bad_rsp(self, httplib_response):
- rsp = httplib_response(status=500)
- with pytest.raises(upnp.IGDError) as err:
- upnp._parse_for_errors(rsp)
- assert 'Unable to parse' in str(err.value)
-
- def test_parse_for_errors_error(self, httplib_response):
- soap_error = ('<document>'
- '<errorCode>500</errorCode>'
- '<errorDescription>Bad request</errorDescription>'
- '</document>')
- rsp = httplib_response(status=500, body=soap_error)
- with pytest.raises(upnp.IGDError) as err:
- upnp._parse_for_errors(rsp)
- assert 'SOAP request error' in str(err.value)
-
- def test_parse_for_errors_good_rsp(self, httplib_response):
- rsp = httplib_response(status=200)
- assert rsp == upnp._parse_for_errors(rsp)
-
- def test_send_requests_success(self):
- with mock.patch(
- 'util.UpnpPunch._send_soap_request') as mock_send_request:
- mock_send_request.return_value = mock.MagicMock(status=200)
- upnp._send_requests(['msg'], None, None, None)
-
- assert mock_send_request.called
-
- def test_send_requests_failed(self):
- with mock.patch(
- 'util.UpnpPunch._send_soap_request') as mock_send_request:
- mock_send_request.return_value = mock.MagicMock(status=500)
- with pytest.raises(upnp.UpnpError):
- upnp._send_requests(['msg'], None, None, None)
-
- assert mock_send_request.called
-
- def test_collect_idg_data(self):
- pass
-
- @mock.patch('util.UpnpPunch._get_local_ips')
- @mock.patch('util.UpnpPunch._collect_idg_data')
- @mock.patch('util.UpnpPunch._send_requests')
- def test_ask_to_open_port_success(self, mock_send_requests,
- mock_collect_idg, mock_local_ips):
- mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
- mock_local_ips.return_value = ['192.168.0.12']
-
- result = upnp.ask_to_open_port(retries=5)
-
- soap_msg = mock_send_requests.call_args[0][0][0][0]
-
- assert result is True
-
- assert mock_collect_idg.called
- assert '192.168.0.12' in soap_msg
- assert '15441' in soap_msg
- assert 'schema-yo' in soap_msg
-
- @mock.patch('util.UpnpPunch._get_local_ips')
- @mock.patch('util.UpnpPunch._collect_idg_data')
- @mock.patch('util.UpnpPunch._send_requests')
- def test_ask_to_open_port_failure(self, mock_send_requests,
- mock_collect_idg, mock_local_ips):
- mock_local_ips.return_value = ['192.168.0.12']
- mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
- mock_send_requests.side_effect = upnp.UpnpError()
-
- with pytest.raises(upnp.UpnpError):
- upnp.ask_to_open_port()
-
- @mock.patch('util.UpnpPunch._collect_idg_data')
- @mock.patch('util.UpnpPunch._send_requests')
- def test_orchestrate_soap_request(self, mock_send_requests,
- mock_collect_idg):
- soap_mock = mock.MagicMock()
- args = ['127.0.0.1', 31337, soap_mock, 'upnp-test', {'upnp_schema': 'schema-yo'}]
- mock_collect_idg.return_value = args[-1]
-
- upnp._orchestrate_soap_request(*args[:-1])
-
- assert mock_collect_idg.called
- soap_mock.assert_called_with(
- *args[:2] + ['upnp-test', 'UDP', 'schema-yo'])
- assert mock_send_requests.called
-
- @mock.patch('util.UpnpPunch._collect_idg_data')
- @mock.patch('util.UpnpPunch._send_requests')
- def test_orchestrate_soap_request_without_desc(self, mock_send_requests,
- mock_collect_idg):
- soap_mock = mock.MagicMock()
- args = ['127.0.0.1', 31337, soap_mock, {'upnp_schema': 'schema-yo'}]
- mock_collect_idg.return_value = args[-1]
-
- upnp._orchestrate_soap_request(*args[:-1])
-
- assert mock_collect_idg.called
- soap_mock.assert_called_with(*args[:2] + [None, 'UDP', 'schema-yo'])
- assert mock_send_requests.called
-
- def test_create_close_message_parsable(self):
- from xml.parsers.expat import ExpatError
- msg, _ = upnp._create_close_message('127.0.0.1', 8888)
- try:
- upnp.parseString(msg)
- except ExpatError as e:
- pytest.fail('Incorrect XML message: {}'.format(e))
-
- def test_create_close_message_contains_right_stuff(self):
- settings = {'protocol': 'test proto',
- 'upnp_schema': 'test schema'}
- msg, fn_name = upnp._create_close_message('127.0.0.1', 8888, **settings)
- assert fn_name == 'DeletePortMapping'
- assert '8888' in msg
- assert settings['protocol'] in msg
- assert settings['upnp_schema'] in msg
-
- @mock.patch('util.UpnpPunch._get_local_ips')
- @mock.patch('util.UpnpPunch._orchestrate_soap_request')
- def test_communicate_with_igd_success(self, mock_orchestrate,
- mock_get_local_ips):
- mock_get_local_ips.return_value = ['192.168.0.12']
- upnp._communicate_with_igd()
- assert mock_get_local_ips.called
- assert mock_orchestrate.called
-
- @mock.patch('util.UpnpPunch._get_local_ips')
- @mock.patch('util.UpnpPunch._orchestrate_soap_request')
- def test_communicate_with_igd_succeed_despite_single_failure(
- self, mock_orchestrate, mock_get_local_ips):
- mock_get_local_ips.return_value = ['192.168.0.12']
- mock_orchestrate.side_effect = [upnp.UpnpError, None]
- upnp._communicate_with_igd(retries=2)
- assert mock_get_local_ips.called
- assert mock_orchestrate.called
-
- @mock.patch('util.UpnpPunch._get_local_ips')
- @mock.patch('util.UpnpPunch._orchestrate_soap_request')
- def test_communicate_with_igd_total_failure(self, mock_orchestrate,
- mock_get_local_ips):
- mock_get_local_ips.return_value = ['192.168.0.12']
- mock_orchestrate.side_effect = [upnp.UpnpError, upnp.IGDError]
- with pytest.raises(upnp.UpnpError):
- upnp._communicate_with_igd(retries=2)
- assert mock_get_local_ips.called
- assert mock_orchestrate.called
diff --git a/src/Test/TestUser.py b/src/Test/TestUser.py
deleted file mode 100644
index e5ec5c8c..00000000
--- a/src/Test/TestUser.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import pytest
-
-from Crypt import CryptBitcoin
-
-
-@pytest.mark.usefixtures("resetSettings")
-class TestUser:
- def testAddress(self, user):
- assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
- address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
- assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index
-
- # Re-generate privatekey based on address_index
- def testNewSite(self, user):
- address, address_index, site_data = user.getNewSiteData() # Create a new random site
- assert CryptBitcoin.hdPrivatekey(user.master_seed, address_index) == site_data["privatekey"]
-
- user.sites = {} # Reset user data
-
- # Site address and auth address are different
- assert user.getSiteData(address)["auth_address"] != address
- # Re-generate auth_privatekey for site
- assert user.getSiteData(address)["auth_privatekey"] == site_data["auth_privatekey"]
-
- def testAuthAddress(self, user):
- # Auth address without Cert
- auth_address = user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
- assert auth_address == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
- auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
- assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == auth_address
-
- def testCert(self, user):
- cert_auth_address = user.getAuthAddress("1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz") # Add site to user's registry
- # Add cert
- user.addCert(cert_auth_address, "zeroid.bit", "faketype", "fakeuser", "fakesign")
- user.setCert("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr", "zeroid.bit")
-
-        # When using a certificate, the auth address should be the same as the certificate provider's
- assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
- auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
- assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == cert_auth_address
-
- # Test delete site data
- assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" in user.sites
- user.deleteSiteData("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
- assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" not in user.sites
-
-        # Re-adding the site should generate a normal, unique auth_address
- assert not user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
- assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py
deleted file mode 100644
index 2ce66c98..00000000
--- a/src/Test/TestWeb.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import urllib.request
-
-import pytest
-
-try:
- from selenium.webdriver.support.ui import WebDriverWait
- from selenium.webdriver.support.expected_conditions import staleness_of, title_is
- from selenium.common.exceptions import NoSuchElementException
-except ImportError:
- pass
-
-
-class WaitForPageLoad(object):
- def __init__(self, browser):
- self.browser = browser
-
- def __enter__(self):
- self.old_page = self.browser.find_element_by_tag_name('html')
-
- def __exit__(self, *args):
- WebDriverWait(self.browser, 10).until(staleness_of(self.old_page))
-
-
-def getContextUrl(browser):
- return browser.execute_script("return window.location.toString()")
-
-
-def getUrl(url):
-    content = urllib.request.urlopen(url).read().decode("utf8")  # decode bytes so the str substring checks below work
-    assert "server error" not in content.lower(), "Got a server error! " + repr(url)
- return content
-
-@pytest.mark.usefixtures("resetSettings")
-@pytest.mark.webtest
-class TestWeb:
- def testFileSecurity(self, site_url):
- assert "Not Found" in getUrl("%s/media/sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/media/../config.py" % site_url)
- assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
-
- assert "Not Found" in getUrl("%s/raw/sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url)
- assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
-
- assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
- assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
-
- assert "Forbidden" in getUrl("%s/content.db" % site_url)
- assert "Forbidden" in getUrl("%s/./users.json" % site_url)
- assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url)
- assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url)
-
- def testLinkSecurity(self, browser, site_url):
- browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url)
- WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet"))
- assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url
-
- # Switch to inner frame
- browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
- assert "wrapper_nonce" in getContextUrl(browser)
- assert browser.find_element_by_id("script_output").text == "Result: Works"
- browser.switch_to.default_content()
-
- # Clicking on links without target
- browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
- with WaitForPageLoad(browser):
- browser.find_element_by_id("link_to_current").click()
-        assert "wrapper_nonce" not in getContextUrl(browser)  # The browser object is back to the default content
- assert "Forbidden" not in browser.page_source
- # Check if we have frame inside frame
- browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
- with pytest.raises(NoSuchElementException):
- assert not browser.find_element_by_id("inner-iframe")
- browser.switch_to.default_content()
-
- # Clicking on link with target=_top
- browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
- with WaitForPageLoad(browser):
- browser.find_element_by_id("link_to_top").click()
-        assert "wrapper_nonce" not in getContextUrl(browser)  # The browser object is back to the default content
- assert "Forbidden" not in browser.page_source
- browser.switch_to.default_content()
-
- # Try to escape from inner_frame
- browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
-        assert "wrapper_nonce" in getContextUrl(browser)  # Make sure we are inside the inner-iframe
- with WaitForPageLoad(browser):
- browser.execute_script("window.top.location = window.location")
-        assert "wrapper_nonce" in getContextUrl(browser)  # We tried to load the nonce-ed html outside the iframe
- assert "
"+escape(e.message+"",true)+" "}throw e}}marked.options=marked.setOptions=function(opt){merge(marked.defaults,opt);return marked};marked.defaults={gfm:true,tables:true,breaks:false,pedantic:false,sanitize:false,smartLists:false,silent:false,highlight:null,langPrefix:"lang-",smartypants:false,headerPrefix:"",renderer:new Renderer,xhtml:false};marked.Parser=Parser;marked.parser=Parser.parse;marked.Renderer=Renderer;marked.Lexer=Lexer;marked.lexer=Lexer.lex;marked.InlineLexer=InlineLexer;marked.inlineLexer=InlineLexer.output;marked.parse=marked;if(typeof module!=="undefined"&&typeof exports==="object"){module.exports=marked}else if(typeof define==="function"&&define.amd){define(function(){return marked})}else{this.marked=marked}}).call(function(){return this||(typeof window!=="undefined"?window:global)}());
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/lib/pnglib.js ---- */
-
-
-/**
-* A handy class to calculate color values.
-*
-* @version 1.0
-* @author Robert Eisele
-* @copyright Copyright (c) 2010, Robert Eisele
-* @link http://www.xarg.org/2010/03/generate-client-side-png-files-using-javascript/
-* @license http://www.opensource.org/licenses/bsd-license.php BSD License
-*
-*/
-
-(function() {
-
-    // helper functions for writing byte strings into the buffer
- function write(buffer, offs) {
- for (var i = 2; i < arguments.length; i++) {
- for (var j = 0; j < arguments[i].length; j++) {
- buffer[offs++] = arguments[i].charAt(j);
- }
- }
- }
-
- function byte2(w) {
- return String.fromCharCode((w >> 8) & 255, w & 255);
- }
-
- function byte4(w) {
- return String.fromCharCode((w >> 24) & 255, (w >> 16) & 255, (w >> 8) & 255, w & 255);
- }
-
- function byte2lsb(w) {
- return String.fromCharCode(w & 255, (w >> 8) & 255);
- }
-
- window.PNGlib = function(width,height,depth) {
-
- this.width = width;
- this.height = height;
- this.depth = depth;
-
- // pixel data and row filter identifier size
- this.pix_size = height * (width + 1);
-
- // deflate header, pix_size, block headers, adler32 checksum
- this.data_size = 2 + this.pix_size + 5 * Math.floor((0xfffe + this.pix_size) / 0xffff) + 4;
-
- // offsets and sizes of Png chunks
- this.ihdr_offs = 0; // IHDR offset and size
- this.ihdr_size = 4 + 4 + 13 + 4;
- this.plte_offs = this.ihdr_offs + this.ihdr_size; // PLTE offset and size
- this.plte_size = 4 + 4 + 3 * depth + 4;
- this.trns_offs = this.plte_offs + this.plte_size; // tRNS offset and size
- this.trns_size = 4 + 4 + depth + 4;
- this.idat_offs = this.trns_offs + this.trns_size; // IDAT offset and size
- this.idat_size = 4 + 4 + this.data_size + 4;
- this.iend_offs = this.idat_offs + this.idat_size; // IEND offset and size
- this.iend_size = 4 + 4 + 4;
- this.buffer_size = this.iend_offs + this.iend_size; // total PNG size
-
- this.buffer = new Array();
- this.palette = new Object();
- this.pindex = 0;
-
- var _crc32 = new Array();
-
- // initialize buffer with zero bytes
- for (var i = 0; i < this.buffer_size; i++) {
- this.buffer[i] = "\x00";
- }
-
- // initialize non-zero elements
- write(this.buffer, this.ihdr_offs, byte4(this.ihdr_size - 12), 'IHDR', byte4(width), byte4(height), "\x08\x03");
- write(this.buffer, this.plte_offs, byte4(this.plte_size - 12), 'PLTE');
- write(this.buffer, this.trns_offs, byte4(this.trns_size - 12), 'tRNS');
- write(this.buffer, this.idat_offs, byte4(this.idat_size - 12), 'IDAT');
- write(this.buffer, this.iend_offs, byte4(this.iend_size - 12), 'IEND');
-
- // initialize deflate header
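-    // CMF byte: method 8 (deflate) + window size code 7 (32K); FLG byte starts
-    // with FLEVEL=3, then FCHECK is chosen so the 16-bit header value is
-    // divisible by 31, as RFC 1950 requires (result is the familiar 0x78DA)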
- var header = ((8 + (7 << 4)) << 8) | (3 << 6);
- header+= 31 - (header % 31);
-
- write(this.buffer, this.idat_offs + 8, byte2(header));
-
- // initialize deflate block headers
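-    // Each "stored" (uncompressed) deflate block has a 5-byte header: one
-    // BFINAL/BTYPE byte ("\x00" = more blocks follow, "\x01" = last block),
-    // then the 16-bit length and its one's complement, both little-endian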
- for (var i = 0; (i << 16) - 1 < this.pix_size; i++) {
- var size, bits;
- if (i + 0xffff < this.pix_size) {
- size = 0xffff;
- bits = "\x00";
- } else {
- size = this.pix_size - (i << 16) - i;
- bits = "\x01";
- }
- write(this.buffer, this.idat_offs + 8 + 2 + (i << 16) + (i << 2), bits, byte2lsb(size), byte2lsb(~size));
- }
-
- /* Create crc32 lookup table */
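-    // -306674912 is the signed 32-bit form of 0xEDB88320, the reflected
-    // CRC-32 polynomial; the "& 0x7fffffff" masks emulate an unsigned shift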
- for (var i = 0; i < 256; i++) {
- var c = i;
- for (var j = 0; j < 8; j++) {
- if (c & 1) {
- c = -306674912 ^ ((c >> 1) & 0x7fffffff);
- } else {
- c = (c >> 1) & 0x7fffffff;
- }
- }
- _crc32[i] = c;
- }
-
- // compute the index into a png for a given pixel
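-    // y * (width + 1) accounts for the one filter byte per scanline (the
-    // "+ 1" after x skips the current row's filter byte); the 5 * floor(...)
-    // term skips the 5-byte stored-block headers interleaved in the IDAT data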
- this.index = function(x,y) {
- var i = y * (this.width + 1) + x + 1;
- var j = this.idat_offs + 8 + 2 + 5 * Math.floor((i / 0xffff) + 1) + i;
- return j;
- }
-
- // convert a color and build up the palette
- this.color = function(red, green, blue, alpha) {
-
- alpha = alpha >= 0 ? alpha : 255;
- var color = (((((alpha << 8) | red) << 8) | green) << 8) | blue;
-
- if (typeof this.palette[color] == "undefined") {
- if (this.pindex == this.depth) return "\x00";
-
- var ndx = this.plte_offs + 8 + 3 * this.pindex;
-
- this.buffer[ndx + 0] = String.fromCharCode(red);
- this.buffer[ndx + 1] = String.fromCharCode(green);
- this.buffer[ndx + 2] = String.fromCharCode(blue);
- this.buffer[this.trns_offs+8+this.pindex] = String.fromCharCode(alpha);
-
- this.palette[color] = String.fromCharCode(this.pindex++);
- }
- return this.palette[color];
- }
-
- // output a PNG string, Base64 encoded
- this.getBase64 = function() {
-
- var s = this.getDump();
-
- var ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
- var c1, c2, c3, e1, e2, e3, e4;
- var l = s.length;
- var i = 0;
- var r = "";
-
- do {
- c1 = s.charCodeAt(i);
- e1 = c1 >> 2;
- c2 = s.charCodeAt(i+1);
- e2 = ((c1 & 3) << 4) | (c2 >> 4);
- c3 = s.charCodeAt(i+2);
- if (l < i+2) { e3 = 64; } else { e3 = ((c2 & 0xf) << 2) | (c3 >> 6); }
- if (l < i+3) { e4 = 64; } else { e4 = c3 & 0x3f; }
- r+= ch.charAt(e1) + ch.charAt(e2) + ch.charAt(e3) + ch.charAt(e4);
- } while ((i+= 3) < l);
- return r;
- }
-
- // output a PNG string
- this.getDump = function() {
-
- // compute adler32 of output pixels + row filter bytes
- var BASE = 65521; /* largest prime smaller than 65536 */
- var NMAX = 5552; /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
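-        // Adler-32: s1 is the running byte sum, s2 the sum of all s1 values;
-        // NMAX batches iterations so the "% BASE" reductions can be deferred
-        // without overflowing 32-bit arithmetic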
- var s1 = 1;
- var s2 = 0;
- var n = NMAX;
-
- for (var y = 0; y < this.height; y++) {
- for (var x = -1; x < this.width; x++) {
- s1+= this.buffer[this.index(x, y)].charCodeAt(0);
- s2+= s1;
- if ((n-= 1) == 0) {
- s1%= BASE;
- s2%= BASE;
- n = NMAX;
- }
- }
- }
- s1%= BASE;
- s2%= BASE;
- write(this.buffer, this.idat_offs + this.idat_size - 8, byte4((s2 << 16) | s1));
-
- // compute crc32 of the PNG chunks
- function crc32(png, offs, size) {
- var crc = -1;
- for (var i = 4; i < size-4; i += 1) {
- crc = _crc32[(crc ^ png[offs+i].charCodeAt(0)) & 0xff] ^ ((crc >> 8) & 0x00ffffff);
- }
- write(png, offs+size-4, byte4(crc ^ -1));
- }
-
- crc32(this.buffer, this.ihdr_offs, this.ihdr_size);
- crc32(this.buffer, this.plte_offs, this.plte_size);
- crc32(this.buffer, this.trns_offs, this.trns_size);
- crc32(this.buffer, this.idat_offs, this.idat_size);
- crc32(this.buffer, this.iend_offs, this.iend_size);
-
- // convert PNG to string
- return "\211PNG\r\n\032\n"+this.buffer.join('');
- }
- }
-
-})();
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/Class.coffee ---- */
-
-
-(function() {
- var Class,
- __slice = [].slice;
-
- Class = (function() {
- function Class() {}
-
- Class.prototype.trace = true;
-
- Class.prototype.log = function() {
- var args;
- args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
- if (!this.trace) {
- return;
- }
- if (typeof console === 'undefined') {
- return;
- }
- args.unshift("[" + this.constructor.name + "]");
- console.log.apply(console, args);
- return this;
- };
-
- Class.prototype.logStart = function() {
- var args, name;
- name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : [];
- if (!this.trace) {
- return;
- }
- this.logtimers || (this.logtimers = {});
- this.logtimers[name] = +(new Date);
- if (args.length > 0) {
- this.log.apply(this, ["" + name].concat(__slice.call(args), ["(started)"]));
- }
- return this;
- };
-
- Class.prototype.logEnd = function() {
- var args, ms, name;
- name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : [];
- ms = +(new Date) - this.logtimers[name];
- this.log.apply(this, ["" + name].concat(__slice.call(args), ["(Done in " + ms + "ms)"]));
- return this;
- };
-
- return Class;
-
- })();
-
- window.Class = Class;
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/InlineEditor.coffee ---- */
-
-
-(function() {
- var InlineEditor,
- __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
-
- InlineEditor = (function() {
- function InlineEditor(_at_elem, _at_getContent, _at_saveContent, _at_getObject) {
- this.elem = _at_elem;
- this.getContent = _at_getContent;
- this.saveContent = _at_saveContent;
- this.getObject = _at_getObject;
- this.cancelEdit = __bind(this.cancelEdit, this);
- this.deleteObject = __bind(this.deleteObject, this);
- this.saveEdit = __bind(this.saveEdit, this);
- this.stopEdit = __bind(this.stopEdit, this);
- this.startEdit = __bind(this.startEdit, this);
- this.edit_button = $(" ");
- this.edit_button.on("click", this.startEdit);
- this.elem.addClass("editable").before(this.edit_button);
- this.editor = null;
- this.elem.on("mouseenter", (function(_this) {
- return function(e) {
- var scrolltop, top;
- _this.edit_button.css("opacity", "0.4");
- scrolltop = $(window).scrollTop();
- top = _this.edit_button.offset().top - parseInt(_this.edit_button.css("margin-top"));
- if (scrolltop > top) {
- return _this.edit_button.css("margin-top", scrolltop - top + e.clientY - 20);
- } else {
- return _this.edit_button.css("margin-top", "");
- }
- };
- })(this));
- this.elem.on("mouseleave", (function(_this) {
- return function() {
- return _this.edit_button.css("opacity", "");
- };
- })(this));
- if (this.elem.is(":hover")) {
- this.elem.trigger("mouseenter");
- }
- }
-
- InlineEditor.prototype.startEdit = function() {
- var _i, _results;
- this.content_before = this.elem.html();
- this.editor = $("");
- this.editor.css("outline", "10000px solid rgba(255,255,255,0)").cssLater("transition", "outline 0.3s", 5).cssLater("outline", "10000px solid rgba(255,255,255,0.9)", 10);
- this.editor.val(this.getContent(this.elem, "raw"));
- this.elem.after(this.editor);
- this.elem.html((function() {
- _results = [];
- for (_i = 1; _i <= 50; _i++){ _results.push(_i); }
- return _results;
- }).apply(this).join("fill the width"));
- this.copyStyle(this.elem, this.editor);
- this.elem.html(this.content_before);
- this.autoExpand(this.editor);
- this.elem.css("display", "none");
- if ($(window).scrollTop() === 0) {
- this.editor[0].selectionEnd = 0;
- this.editor.focus();
- }
- $(".editable-edit").css("display", "none");
- $(".editbar").css("display", "inline-block").addClassLater("visible", 10);
- $(".publishbar").css("opacity", 0);
- $(".editbar .object").text(this.getObject(this.elem).data("object") + "." + this.elem.data("editable"));
- $(".editbar .button").removeClass("loading");
- $(".editbar .save").off("click").on("click", this.saveEdit);
- $(".editbar .delete").off("click").on("click", this.deleteObject);
- $(".editbar .cancel").off("click").on("click", this.cancelEdit);
- if (this.getObject(this.elem).data("deletable")) {
- $(".editbar .delete").css("display", "").html("Delete " + this.getObject(this.elem).data("object").split(":")[0]);
- } else {
- $(".editbar .delete").css("display", "none");
- }
- window.onbeforeunload = function() {
- return 'Your unsaved blog changes will be lost!';
- };
- return false;
- };
-
- InlineEditor.prototype.stopEdit = function() {
- this.editor.remove();
- this.editor = null;
- this.elem.css("display", "");
- $(".editable-edit").css("display", "");
- $(".editbar").cssLater("display", "none", 1000).removeClass("visible");
- $(".publishbar").css("opacity", 1);
- return window.onbeforeunload = null;
- };
-
- InlineEditor.prototype.saveEdit = function() {
- var content;
- content = this.editor.val();
- $(".editbar .save").addClass("loading");
- this.saveContent(this.elem, content, (function(_this) {
- return function(content_html) {
- if (content_html) {
- $(".editbar .save").removeClass("loading");
- _this.stopEdit();
- if (typeof content_html === "string") {
- _this.elem.html(content_html);
- }
- return $('pre code').each(function(i, block) {
- return hljs.highlightBlock(block);
- });
- } else {
- return $(".editbar .save").removeClass("loading");
- }
- };
- })(this));
- return false;
- };
-
- InlineEditor.prototype.deleteObject = function() {
- var object_type;
- object_type = this.getObject(this.elem).data("object").split(":")[0];
-      Page.cmd("wrapperConfirm", ["Are you sure you want to delete this " + object_type + "?", "Delete"], (function(_this) {
- return function(confirmed) {
- $(".editbar .delete").addClass("loading");
- return Page.saveContent(_this.getObject(_this.elem), null, function() {
- return _this.stopEdit();
- });
- };
- })(this));
- return false;
- };
-
- InlineEditor.prototype.cancelEdit = function() {
- this.stopEdit();
- this.elem.html(this.content_before);
- $('pre code').each(function(i, block) {
- return hljs.highlightBlock(block);
- });
- return false;
- };
-
- InlineEditor.prototype.copyStyle = function(elem_from, elem_to) {
- var from_style;
- elem_to.addClass(elem_from[0].className);
- from_style = getComputedStyle(elem_from[0]);
- elem_to.css({
- fontFamily: from_style.fontFamily,
- fontSize: from_style.fontSize,
- fontWeight: from_style.fontWeight,
- marginTop: from_style.marginTop,
- marginRight: from_style.marginRight,
- marginBottom: from_style.marginBottom,
- marginLeft: from_style.marginLeft,
- paddingTop: from_style.paddingTop,
- paddingRight: from_style.paddingRight,
- paddingBottom: from_style.paddingBottom,
- paddingLeft: from_style.paddingLeft,
- lineHeight: from_style.lineHeight,
- textAlign: from_style.textAlign,
- color: from_style.color,
- letterSpacing: from_style.letterSpacing
- });
- if (elem_from.innerWidth() < 1000) {
- return elem_to.css("minWidth", elem_from.innerWidth());
- }
- };
-
- InlineEditor.prototype.autoExpand = function(elem) {
- var editor;
- editor = elem[0];
- elem.height(1);
- elem.on("input", function() {
- if (editor.scrollHeight > elem.height()) {
- return elem.height(1).height(editor.scrollHeight + parseFloat(elem.css("borderTopWidth")) + parseFloat(elem.css("borderBottomWidth")));
- }
- });
- elem.trigger("input");
- return elem.on('keydown', function(e) {
- var s, val;
- if (e.which === 9) {
- e.preventDefault();
- s = this.selectionStart;
- val = elem.val();
- elem.val(val.substring(0, this.selectionStart) + "\t" + val.substring(this.selectionEnd));
- return this.selectionEnd = s + 1;
- }
- });
- };
-
- return InlineEditor;
-
- })();
-
- window.InlineEditor = InlineEditor;
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/RateLimit.coffee ---- */
-
-
-(function() {
- var call_after_interval, limits;
-
- limits = {};
-
- call_after_interval = {};
-
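-  // Leading-edge rate limiter: fn runs immediately, then at most once more
-  // per interval; calls made during the cooldown are coalesced into a single
-  // trailing call. Note fn itself is used as an object key, so it is coerced
-  // to its source string: functions with identical source share one limit.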
- window.RateLimit = function(interval, fn) {
- if (!limits[fn]) {
- call_after_interval[fn] = false;
- fn();
- return limits[fn] = setTimeout((function() {
- if (call_after_interval[fn]) {
- fn();
- }
- delete limits[fn];
- return delete call_after_interval[fn];
- }), interval);
- } else {
- return call_after_interval[fn] = true;
- }
- };
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/Text.coffee ---- */
-
-
-(function() {
- var Renderer, Text,
- __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
- __hasProp = {}.hasOwnProperty,
- __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
-
- Renderer = (function(_super) {
- __extends(Renderer, _super);
-
- function Renderer() {
- return Renderer.__super__.constructor.apply(this, arguments);
- }
-
- Renderer.prototype.image = function(href, title, text) {
-      return "![" + text + "](" + href + ")";
- };
-
- return Renderer;
-
- })(marked.Renderer);
-
- Text = (function() {
- function Text() {
- this.toUrl = __bind(this.toUrl, this);
- }
-
- Text.prototype.toColor = function(text) {
-      var hash, i, _i, _ref;
- hash = 0;
- for (i = _i = 0, _ref = text.length - 1; 0 <= _ref ? _i <= _ref : _i >= _ref; i = 0 <= _ref ? ++_i : --_i) {
- hash = text.charCodeAt(i) + ((hash << 5) - hash);
- }
-      return "hsl(" + (hash % 360) + ",30%,50%)";
- };
-
- Text.prototype.toMarked = function(text, options) {
- if (options == null) {
- options = {};
- }
- options["gfm"] = true;
- options["breaks"] = true;
- if (options.sanitize) {
- options["renderer"] = renderer;
- }
- text = marked(text, options);
- return this.fixHtmlLinks(text);
- };
-
- Text.prototype.fixHtmlLinks = function(text) {
- if (window.is_proxy) {
- return text.replace(/href="http:\/\/(127.0.0.1|localhost):43110/g, 'href="http://zero');
- } else {
- return text.replace(/href="http:\/\/(127.0.0.1|localhost):43110/g, 'href="');
- }
- };
-
- Text.prototype.fixLink = function(link) {
- if (window.is_proxy) {
- return link.replace(/http:\/\/(127.0.0.1|localhost):43110/, 'http://zero');
- } else {
- return link.replace(/http:\/\/(127.0.0.1|localhost):43110/, '');
- }
- };
-
- Text.prototype.toUrl = function(text) {
- return text.replace(/[^A-Za-z0-9]/g, "+").replace(/[+]+/g, "+").replace(/[+]+$/, "");
- };
-
- return Text;
-
- })();
-
- window.is_proxy = window.location.pathname === "/";
-
- window.renderer = new Renderer();
-
- window.Text = new Text();
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/Time.coffee ---- */
-
-
-(function() {
- var Time;
-
- Time = (function() {
- function Time() {}
-
- Time.prototype.since = function(time) {
- var back, now, secs;
- now = +(new Date) / 1000;
- secs = now - time;
- if (secs < 60) {
- back = "Just now";
- } else if (secs < 60 * 60) {
- back = (Math.round(secs / 60)) + " minutes ago";
- } else if (secs < 60 * 60 * 24) {
- back = (Math.round(secs / 60 / 60)) + " hours ago";
- } else if (secs < 60 * 60 * 24 * 3) {
- back = (Math.round(secs / 60 / 60 / 24)) + " days ago";
- } else {
- back = "on " + this.date(time);
- }
- back = back.replace(/1 ([a-z]+)s/, "1 $1");
- return back;
- };
-
- Time.prototype.date = function(timestamp, format) {
- var display, parts;
- if (format == null) {
- format = "short";
- }
- parts = (new Date(timestamp * 1000)).toString().split(" ");
- if (format === "short") {
- display = parts.slice(1, 4);
- } else {
- display = parts.slice(1, 5);
- }
- return display.join(" ").replace(/( [0-9]{4})/, ",$1");
- };
-
- Time.prototype.timestamp = function(date) {
- if (date == null) {
- date = "";
- }
- if (date === "now" || date === "") {
- return parseInt(+(new Date) / 1000);
- } else {
- return parseInt(Date.parse(date) / 1000);
- }
- };
-
- Time.prototype.readtime = function(text) {
- var chars;
- chars = text.length;
- if (chars > 1500) {
- return parseInt(chars / 1500) + " min read";
- } else {
- return "less than 1 min read";
- }
- };
-
- return Time;
-
- })();
-
- window.Time = new Time;
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/ZeroFrame.coffee ---- */
-
-
-(function() {
- var ZeroFrame,
- __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
- __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
- __hasProp = {}.hasOwnProperty;
-
- ZeroFrame = (function(_super) {
- __extends(ZeroFrame, _super);
-
- function ZeroFrame(url) {
- this.onCloseWebsocket = __bind(this.onCloseWebsocket, this);
- this.onOpenWebsocket = __bind(this.onOpenWebsocket, this);
-      this.onRequest = __bind(this.onRequest, this);
- this.onMessage = __bind(this.onMessage, this);
- this.url = url;
- this.waiting_cb = {};
- this.connect();
- this.next_message_id = 1;
- this.init();
- }
-
- ZeroFrame.prototype.init = function() {
- return this;
- };
-
- ZeroFrame.prototype.connect = function() {
- this.target = window.parent;
- window.addEventListener("message", this.onMessage, false);
- return this.cmd("innerReady");
- };
-
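-    // Wrapper protocol: every outgoing postMessage carries an incrementing id;
-    // when a "response" arrives, its "to" field selects the callback that was
-    // registered for that id in waiting_cb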
- ZeroFrame.prototype.onMessage = function(e) {
- var cmd, message;
- message = e.data;
- cmd = message.cmd;
- if (cmd === "response") {
- if (this.waiting_cb[message.to] != null) {
- return this.waiting_cb[message.to](message.result);
- } else {
- return this.log("Websocket callback not found:", message);
- }
- } else if (cmd === "wrapperReady") {
- return this.cmd("innerReady");
- } else if (cmd === "ping") {
- return this.response(message.id, "pong");
- } else if (cmd === "wrapperOpenedWebsocket") {
- return this.onOpenWebsocket();
- } else if (cmd === "wrapperClosedWebsocket") {
- return this.onCloseWebsocket();
- } else {
- return this.onRequest(cmd, message);
- }
- };
-
-    ZeroFrame.prototype.onRequest = function(cmd, message) {
- return this.log("Unknown command", message);
- };
-
- ZeroFrame.prototype.response = function(to, result) {
- return this.send({
- "cmd": "response",
- "to": to,
- "result": result
- });
- };
-
- ZeroFrame.prototype.cmd = function(cmd, params, cb) {
- if (params == null) {
- params = {};
- }
- if (cb == null) {
- cb = null;
- }
- return this.send({
- "cmd": cmd,
- "params": params
- }, cb);
- };
-
- ZeroFrame.prototype.send = function(message, cb) {
- if (cb == null) {
- cb = null;
- }
- message.id = this.next_message_id;
- this.next_message_id += 1;
- this.target.postMessage(message, "*");
- if (cb) {
- return this.waiting_cb[message.id] = cb;
- }
- };
-
- ZeroFrame.prototype.onOpenWebsocket = function() {
- return this.log("Websocket open");
- };
-
- ZeroFrame.prototype.onCloseWebsocket = function() {
- return this.log("Websocket close");
- };
-
- return ZeroFrame;
-
- })(Class);
-
- window.ZeroFrame = ZeroFrame;
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/Comments.coffee ---- */
-
-
-(function() {
- var Comments,
- __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
- __hasProp = {}.hasOwnProperty;
-
- Comments = (function(_super) {
- __extends(Comments, _super);
-
- function Comments() {
- return Comments.__super__.constructor.apply(this, arguments);
- }
-
- Comments.prototype.pagePost = function(post_id, cb) {
- if (cb == null) {
- cb = false;
- }
- this.post_id = post_id;
- this.rules = {};
- $(".button-submit-comment").on("click", (function(_this) {
- return function() {
- _this.submitComment();
- return false;
- };
- })(this));
- this.loadComments("noanim", cb);
- this.autoExpand($(".comment-textarea"));
- return $(".certselect").on("click", (function(_this) {
- return function() {
- if (Page.server_info.rev < 160) {
-          Page.cmd("wrapperNotification", ["error", "Comments require at least ZeroNet 0.3.0. Please upgrade!"]);
- } else {
- Page.cmd("certSelect", [["zeroid.bit"]]);
- }
- return false;
- };
- })(this));
- };
-
- Comments.prototype.loadComments = function(type, cb) {
- var query;
- if (type == null) {
- type = "show";
- }
- if (cb == null) {
- cb = false;
- }
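-      // Each user's comments live in data/users/<addr>/data.json; this joins
-      // them to the matching content.json row to resolve cert_user_id, and a
-      // correlated subquery counts each comment's votes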
- query = "SELECT comment.*, json_content.json_id AS content_json_id, keyvalue.value AS cert_user_id, json.directory, (SELECT COUNT(*) FROM comment_vote WHERE comment_vote.comment_uri = comment.comment_id || '@' || json.directory)+1 AS votes FROM comment LEFT JOIN json USING (json_id) LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json') LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id') WHERE post_id = " + this.post_id + " ORDER BY date_added DESC";
- return Page.cmd("dbQuery", query, (function(_this) {
- return function(comments) {
- var comment, comment_address, elem, user_address, _i, _len;
- $(".comments-num").text(comments.length);
- for (_i = 0, _len = comments.length; _i < _len; _i++) {
- comment = comments[_i];
- user_address = comment.directory.replace("users/", "");
- comment_address = comment.comment_id + "_" + user_address;
- elem = $("#comment_" + comment_address);
- if (elem.length === 0) {
- elem = $(".comment.template").clone().removeClass("template").attr("id", "comment_" + comment_address).data("post_id", _this.post_id);
- if (type !== "noanim") {
- elem.cssSlideDown();
- }
- $(".reply", elem).on("click", function(e) {
- return _this.buttonReply($(e.target).parents(".comment"));
- });
- }
- _this.applyCommentData(elem, comment);
- elem.appendTo(".comments");
- }
- return setTimeout((function() {
- return Page.addInlineEditors();
- }), 1000);
- };
- })(this));
- };
-
- Comments.prototype.applyCommentData = function(elem, comment) {
- var cert_domain, user_address, user_name, _ref;
- _ref = comment.cert_user_id.split("@"), user_name = _ref[0], cert_domain = _ref[1];
- user_address = comment.directory.replace("users/", "");
- $(".comment-body", elem).html(Text.toMarked(comment.body, {
- "sanitize": true
- }));
- $(".user_name", elem).text(user_name).css({
- "color": Text.toColor(comment.cert_user_id)
- }).attr("title", user_name + "@" + cert_domain + ": " + user_address);
- $(".added", elem).text(Time.since(comment.date_added)).attr("title", Time.date(comment.date_added, "long"));
- if (user_address === Page.site_info.auth_address) {
- $(elem).attr("data-object", "Comment:" + comment.comment_id).attr("data-deletable", "yes");
- return $(".comment-body", elem).attr("data-editable", "body").data("content", comment.body);
- }
- };
-
- Comments.prototype.buttonReply = function(elem) {
- var body_add, elem_quote, post_id, user_name;
- this.log("Reply to", elem);
- user_name = $(".user_name", elem).text();
- post_id = elem.attr("id");
- body_add = "> [" + user_name + "](\#" + post_id + "): ";
- elem_quote = $(".comment-body", elem).clone();
- $("blockquote", elem_quote).remove();
-      body_add += elem_quote.text().trim().replace(/\n/g, "\n> ");
- body_add += "\n\n";
- $(".comment-new .comment-textarea").val($(".comment-new .comment-textarea").val() + body_add);
- $(".comment-new .comment-textarea").trigger("input").focus();
- return false;
- };
-
- Comments.prototype.submitComment = function() {
- var body, inner_path;
- if (!Page.site_info.cert_user_id) {
-        Page.cmd("wrapperNotification", ["info", "Please select your account."]);
- return false;
- }
- body = $(".comment-new .comment-textarea").val();
- if (!body) {
- $(".comment-new .comment-textarea").focus();
- return false;
- }
- $(".comment-new .button-submit").addClass("loading");
- inner_path = "data/users/" + Page.site_info.auth_address + "/data.json";
- return Page.cmd("fileGet", {
- "inner_path": inner_path,
- "required": false
- }, (function(_this) {
- return function(data) {
- var json_raw;
- if (data) {
- data = JSON.parse(data);
- } else {
- data = {
- "next_comment_id": 1,
- "comment": [],
- "comment_vote": {}
- };
- }
- data.comment.push({
- "comment_id": data.next_comment_id,
- "body": body,
- "post_id": _this.post_id,
- "date_added": Time.timestamp()
- });
- data.next_comment_id += 1;
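-          // btoa() only handles Latin-1, so first convert the UTF-16 JSON
-          // string to UTF-8 bytes via the encodeURIComponent/unescape trick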
- json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
- return Page.writePublish(inner_path, btoa(json_raw), function(res) {
- $(".comment-new .button-submit").removeClass("loading");
- _this.loadComments();
- _this.checkCert("updaterules");
- _this.log("Writepublish result", res);
- if (res !== false) {
- return $(".comment-new .comment-textarea").val("");
- }
- });
- };
- })(this));
- };
-
- Comments.prototype.checkCert = function(type) {
- var last_cert_user_id;
- last_cert_user_id = $(".comment-new .user_name").text();
- if (Page.site_info.cert_user_id) {
- $(".comment-new").removeClass("comment-nocert");
- $(".comment-new .user_name").text(Page.site_info.cert_user_id);
- } else {
- $(".comment-new").addClass("comment-nocert");
- $(".comment-new .user_name").text("Please sign in");
- }
- if ($(".comment-new .user_name").text() !== last_cert_user_id || type === "updaterules") {
- if (Page.site_info.cert_user_id) {
- return Page.cmd("fileRules", "data/users/" + Page.site_info.auth_address + "/content.json", (function(_this) {
- return function(rules) {
- _this.rules = rules;
- if (rules.max_size) {
- return _this.setCurrentSize(rules.current_size);
- } else {
- return _this.setCurrentSize(0);
- }
- };
- })(this));
- } else {
- return this.setCurrentSize(0);
- }
- }
- };
-
- Comments.prototype.setCurrentSize = function(current_size) {
- var current_size_kb;
- if (current_size) {
- current_size_kb = current_size / 1000;
- $(".user-size").text("used: " + (current_size_kb.toFixed(1)) + "k/" + (Math.round(this.rules.max_size / 1000)) + "k");
- return $(".user-size-used").css("width", Math.round(70 * current_size / this.rules.max_size));
- } else {
- return $(".user-size").text("");
- }
- };
-
- Comments.prototype.autoExpand = function(elem) {
- var editor;
- editor = elem[0];
- if (elem.height() > 0) {
- elem.height(1);
- }
- elem.on("input", (function(_this) {
- return function() {
- var current_size, min_height, new_height, old_height;
- if (editor.scrollHeight > elem.height()) {
- old_height = elem.height();
- elem.height(1);
- new_height = editor.scrollHeight;
- new_height += parseFloat(elem.css("borderTopWidth"));
- new_height += parseFloat(elem.css("borderBottomWidth"));
- new_height -= parseFloat(elem.css("paddingTop"));
- new_height -= parseFloat(elem.css("paddingBottom"));
- min_height = parseFloat(elem.css("lineHeight")) * 2;
- if (new_height < min_height) {
- new_height = min_height + 4;
- }
- elem.height(new_height - 4);
- }
- if (_this.rules.max_size) {
- if (elem.val().length > 0) {
- current_size = _this.rules.current_size + elem.val().length + 90;
- } else {
- current_size = _this.rules.current_size;
- }
- return _this.setCurrentSize(current_size);
- }
- };
- })(this));
- if (elem.height() > 0) {
- return elem.trigger("input");
- } else {
- return elem.height("48px");
- }
- };
-
- return Comments;
-
- })(Class);
-
- window.Comments = new Comments();
-
-}).call(this);
-
-
-
-/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/ZeroBlog.coffee ---- */
-
-
-(function() {
- var ZeroBlog,
- __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
- __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
- __hasProp = {}.hasOwnProperty;
-
- ZeroBlog = (function(_super) {
- __extends(ZeroBlog, _super);
-
- function ZeroBlog() {
- this.setSiteinfo = __bind(this.setSiteinfo, this);
- this.actionSetSiteInfo = __bind(this.actionSetSiteInfo, this);
- this.saveContent = __bind(this.saveContent, this);
- this.getContent = __bind(this.getContent, this);
- this.getObject = __bind(this.getObject, this);
- this.onOpenWebsocket = __bind(this.onOpenWebsocket, this);
- this.publish = __bind(this.publish, this);
- this.pageLoaded = __bind(this.pageLoaded, this);
- return ZeroBlog.__super__.constructor.apply(this, arguments);
- }
-
- ZeroBlog.prototype.init = function() {
- this.data = null;
- this.site_info = null;
- this.server_info = null;
- this.event_page_load = $.Deferred();
- this.event_site_info = $.Deferred();
- $.when(this.event_page_load, this.event_site_info).done((function(_this) {
- return function() {
- if (_this.site_info.settings.own || _this.data.demo) {
- _this.addInlineEditors();
- _this.checkPublishbar();
- $(".publishbar").on("click", _this.publish);
- $(".posts .button.new").css("display", "inline-block");
- return $(".editbar .icon-help").on("click", function() {
- $(".editbar .markdown-help").css("display", "block");
- $(".editbar .markdown-help").toggleClassLater("visible", 10);
- $(".editbar .icon-help").toggleClass("active");
- return false;
- });
- }
- };
- })(this));
- $.when(this.event_site_info).done((function(_this) {
- return function() {
- var imagedata;
- _this.log("event site info");
- imagedata = new Identicon(_this.site_info.address, 70).toString();
- return $("body").append("");
- };
- })(this));
- return this.log("inited!");
- };
-
- ZeroBlog.prototype.loadData = function(query) {
- if (query == null) {
- query = "new";
- }
- if (query === "old") {
- query = "SELECT key, value FROM json LEFT JOIN keyvalue USING (json_id) WHERE path = 'data.json'";
- } else {
- query = "SELECT key, value FROM json LEFT JOIN keyvalue USING (json_id) WHERE directory = '' AND file_name = 'data.json'";
- }
- return this.cmd("dbQuery", [query], (function(_this) {
- return function(res) {
- var row, _i, _len;
- _this.data = {};
- if (res) {
- for (_i = 0, _len = res.length; _i < _len; _i++) {
- row = res[_i];
- _this.data[row.key] = row.value;
- }
- $(".left h1 a:not(.editable-edit)").html(_this.data.title).data("content", _this.data.title);
- $(".left h2").html(Text.toMarked(_this.data.description)).data("content", _this.data.description);
- return $(".left .links").html(Text.toMarked(_this.data.links)).data("content", _this.data.links);
- }
- };
- })(this));
- };
-
- ZeroBlog.prototype.routeUrl = function(url) {
- var match;
- this.log("Routing url:", url);
- if (match = url.match(/Post:([0-9]+)/)) {
- $("body").addClass("page-post");
- this.post_id = parseInt(match[1]);
- return this.pagePost();
- } else {
- $("body").addClass("page-main");
- return this.pageMain();
- }
- };
-
- ZeroBlog.prototype.pagePost = function() {
- var s;
- s = +(new Date);
- return this.cmd("dbQuery", ["SELECT * FROM post WHERE post_id = " + this.post_id + " LIMIT 1"], (function(_this) {
- return function(res) {
- if (res.length) {
- _this.applyPostdata($(".post-full"), res[0], true);
- Comments.pagePost(_this.post_id);
- } else {
- $(".post-full").html("Not found ");
- }
- return _this.pageLoaded();
- };
- })(this));
- };
-
- ZeroBlog.prototype.pageMain = function() {
- return this.cmd("dbQuery", ["SELECT post.*, COUNT(comment_id) AS comments FROM post LEFT JOIN comment USING (post_id) GROUP BY post_id ORDER BY date_published"], (function(_this) {
- return function(res) {
- var elem, post, s, _i, _len;
- s = +(new Date);
- for (_i = 0, _len = res.length; _i < _len; _i++) {
- post = res[_i];
- elem = $("#post_" + post.post_id);
- if (elem.length === 0) {
- elem = $(".post.template").clone().removeClass("template").attr("id", "post_" + post.post_id);
- elem.prependTo(".posts");
- }
- _this.applyPostdata(elem, post);
- }
- _this.pageLoaded();
- _this.log("Posts loaded in", (+(new Date)) - s, "ms");
- return $(".posts .new").on("click", function() {
- _this.cmd("fileGet", ["data/data.json"], function(res) {
- var data;
- data = JSON.parse(res);
- data.post.unshift({
- post_id: data.next_post_id,
- title: "New blog post",
- date_published: (+(new Date)) / 1000,
- body: "Blog post body"
- });
- data.next_post_id += 1;
- elem = $(".post.template").clone().removeClass("template");
- _this.applyPostdata(elem, data.post[0]);
- elem.hide();
- elem.prependTo(".posts").slideDown();
- _this.addInlineEditors(elem);
- return _this.writeData(data);
- });
- return false;
- });
- };
- })(this));
- };
-
- ZeroBlog.prototype.pageLoaded = function() {
- $("body").addClass("loaded");
- $('pre code').each(function(i, block) {
- return hljs.highlightBlock(block);
- });
- this.event_page_load.resolve();
- return this.cmd("innerLoaded", true);
- };
-
- ZeroBlog.prototype.addInlineEditors = function(parent) {
- var editor, elem, elems, _i, _len;
- this.logStart("Adding inline editors");
- elems = $("[data-editable]:visible", parent);
- for (_i = 0, _len = elems.length; _i < _len; _i++) {
- elem = elems[_i];
- elem = $(elem);
- if (!elem.data("editor") && !elem.hasClass("editor")) {
- editor = new InlineEditor(elem, this.getContent, this.saveContent, this.getObject);
- elem.data("editor", editor);
- }
- }
- return this.logEnd("Adding inline editors");
- };
-
- ZeroBlog.prototype.checkPublishbar = function() {
- if (!this.site_modified || this.site_modified > this.site_info.content.modified) {
- return $(".publishbar").addClass("visible");
- } else {
- return $(".publishbar").removeClass("visible");
- }
- };
-
- ZeroBlog.prototype.publish = function() {
- this.cmd("wrapperPrompt", ["Enter your private key:", "password"], (function(_this) {
- return function(privatekey) {
- $(".publishbar .button").addClass("loading");
- return _this.cmd("sitePublish", [privatekey], function(res) {
- $(".publishbar .button").removeClass("loading");
- return _this.log("Publish result:", res);
- });
- };
- })(this));
- return false;
- };
-
- ZeroBlog.prototype.applyPostdata = function(elem, post, full) {
- var body, date_published, title_hash;
- if (full == null) {
- full = false;
- }
- title_hash = post.title.replace(/[#?& ]/g, "+").replace(/[+]+/g, "+");
- elem.data("object", "Post:" + post.post_id);
- $(".title .editable", elem).html(post.title).attr("href", "?Post:" + post.post_id + ":" + title_hash).data("content", post.title);
- date_published = Time.since(post.date_published);
- if (post.body.match(/^---/m)) {
- date_published += " · " + (Time.readtime(post.body));
- $(".more", elem).css("display", "inline-block").attr("href", "?Post:" + post.post_id + ":" + title_hash);
- }
- $(".details .published", elem).html(date_published).data("content", post.date_published);
- if (post.comments > 0) {
- $(".details .comments-num", elem).css("display", "inline").attr("href", "?Post:" + post.post_id + ":" + title_hash + "#Comments");
- $(".details .comments-num .num", elem).text(post.comments + " comments");
- } else {
- $(".details .comments-num", elem).css("display", "none");
- }
- if (full) {
- body = post.body;
- } else {
- body = post.body.replace(/^([\s\S]*?)\n---\n[\s\S]*$/, "$1");
- }
- return $(".body", elem).html(Text.toMarked(body)).data("content", post.body);
- };
-
- ZeroBlog.prototype.onOpenWebsocket = function(e) {
- this.loadData();
- this.routeUrl(window.location.search.substring(1));
- this.cmd("siteInfo", {}, this.setSiteinfo);
- return this.cmd("serverInfo", {}, (function(_this) {
- return function(ret) {
- _this.server_info = ret;
- if (_this.server_info.rev < 160) {
- return _this.loadData("old");
- }
- };
- })(this));
- };
-
- ZeroBlog.prototype.getObject = function(elem) {
- return elem.parents("[data-object]:first");
- };
-
- ZeroBlog.prototype.getContent = function(elem, raw) {
- var content, id, type, _ref;
- if (raw == null) {
- raw = false;
- }
- _ref = this.getObject(elem).data("object").split(":"), type = _ref[0], id = _ref[1];
- id = parseInt(id);
- content = elem.data("content");
- if (elem.data("editable-mode") === "timestamp") {
- content = Time.date(content, "full");
- }
- if (elem.data("editable-mode") === "simple" || raw) {
- return content;
- } else {
- return Text.toMarked(content);
- }
- };
-
- ZeroBlog.prototype.saveContent = function(elem, content, cb) {
- var id, type, _ref;
- if (cb == null) {
- cb = false;
- }
- if (elem.data("deletable") && content === null) {
- return this.deleteObject(elem, cb);
- }
- elem.data("content", content);
- _ref = this.getObject(elem).data("object").split(":"), type = _ref[0], id = _ref[1];
- id = parseInt(id);
- if (type === "Post" || type === "Site") {
- return this.saveSite(elem, type, id, content, cb);
- } else if (type === "Comment") {
- return this.saveComment(elem, type, id, content, cb);
- }
- };
-
- ZeroBlog.prototype.saveSite = function(elem, type, id, content, cb) {
- return this.cmd("fileGet", ["data/data.json"], (function(_this) {
- return function(res) {
- var data, post;
- data = JSON.parse(res);
- if (type === "Post") {
- post = ((function() {
- var _i, _len, _ref, _results;
- _ref = data.post;
- _results = [];
- for (_i = 0, _len = _ref.length; _i < _len; _i++) {
- post = _ref[_i];
- if (post.post_id === id) {
- _results.push(post);
- }
- }
- return _results;
- })())[0];
- if (elem.data("editable-mode") === "timestamp") {
- content = Time.timestamp(content);
- }
- post[elem.data("editable")] = content;
- } else if (type === "Site") {
- data[elem.data("editable")] = content;
- }
- return _this.writeData(data, function(res) {
- if (cb) {
- if (res === true) {
- if (elem.data("editable-mode") === "simple") {
- return cb(content);
- } else if (elem.data("editable-mode") === "timestamp") {
- return cb(Time.since(content));
- } else {
- return cb(Text.toMarked(content));
- }
- } else {
- return cb(false);
- }
- }
- });
- };
- })(this));
- };
-
- ZeroBlog.prototype.saveComment = function(elem, type, id, content, cb) {
- var inner_path;
- this.log("Saving comment...", id);
- this.getObject(elem).css("height", "auto");
- inner_path = "data/users/" + Page.site_info.auth_address + "/data.json";
- return Page.cmd("fileGet", {
- "inner_path": inner_path,
- "required": false
- }, (function(_this) {
- return function(data) {
- var comment, json_raw;
- data = JSON.parse(data);
- comment = ((function() {
- var _i, _len, _ref, _results;
- _ref = data.comment;
- _results = [];
- for (_i = 0, _len = _ref.length; _i < _len; _i++) {
- comment = _ref[_i];
- if (comment.comment_id === id) {
- _results.push(comment);
- }
- }
- return _results;
- })())[0];
- comment[elem.data("editable")] = content;
- _this.log(data);
- json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
- return _this.writePublish(inner_path, btoa(json_raw), function(res) {
- if (res === true) {
- Comments.checkCert("updaterules");
- if (cb) {
- return cb(Text.toMarked(content, {
- "sanitize": true
- }));
- }
- } else {
- _this.cmd("wrapperNotification", ["error", "File write error: " + res]);
- if (cb) {
- return cb(false);
- }
- }
- });
- };
- })(this));
- };
-
- ZeroBlog.prototype.deleteObject = function(elem, cb) {
- var id, inner_path, type, _ref;
- if (cb == null) {
-        cb = false;
- }
- _ref = elem.data("object").split(":"), type = _ref[0], id = _ref[1];
- id = parseInt(id);
- if (type === "Post") {
- return this.cmd("fileGet", ["data/data.json"], (function(_this) {
- return function(res) {
- var data, post;
- data = JSON.parse(res);
- if (type === "Post") {
- post = ((function() {
- var _i, _len, _ref1, _results;
- _ref1 = data.post;
- _results = [];
- for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
- post = _ref1[_i];
- if (post.post_id === id) {
- _results.push(post);
- }
- }
- return _results;
- })())[0];
- if (!post) {
- return false;
- }
- data.post.splice(data.post.indexOf(post), 1);
- return _this.writeData(data, function(res) {
- if (cb) {
- cb();
- }
- if (res === true) {
- return elem.slideUp();
- }
- });
- }
- };
- })(this));
- } else if (type === "Comment") {
- inner_path = "data/users/" + Page.site_info.auth_address + "/data.json";
- return this.cmd("fileGet", {
- "inner_path": inner_path,
- "required": false
- }, (function(_this) {
- return function(data) {
- var comment, json_raw;
- data = JSON.parse(data);
- comment = ((function() {
- var _i, _len, _ref1, _results;
- _ref1 = data.comment;
- _results = [];
- for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
- comment = _ref1[_i];
- if (comment.comment_id === id) {
- _results.push(comment);
- }
- }
- return _results;
- })())[0];
- data.comment.splice(data.comment.indexOf(comment), 1);
- json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
- return _this.writePublish(inner_path, btoa(json_raw), function(res) {
- if (res === true) {
- elem.slideUp();
- }
- if (cb) {
- return cb();
- }
- });
- };
- })(this));
- }
- };
-
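-    // Writes data.json, then also rewrites the "title" field of content.json
-    // so the name shown in the site list stays in sync with the blog title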
- ZeroBlog.prototype.writeData = function(data, cb) {
- var json_raw;
- if (cb == null) {
- cb = null;
- }
- if (!data) {
- return this.log("Data missing");
- }
- this.data["modified"] = data.modified = Time.timestamp();
- json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
- this.cmd("fileWrite", ["data/data.json", btoa(json_raw)], (function(_this) {
- return function(res) {
- if (res === "ok") {
- if (cb) {
- cb(true);
- }
- } else {
- _this.cmd("wrapperNotification", ["error", "File write error: " + res]);
- if (cb) {
- cb(false);
- }
- }
- return _this.checkPublishbar();
- };
- })(this));
- return this.cmd("fileGet", ["content.json"], (function(_this) {
- return function(content) {
- content = content.replace(/"title": ".*?"/, "\"title\": \"" + data.title + "\"");
- return _this.cmd("fileWrite", ["content.json", btoa(content)], function(res) {
- if (res !== "ok") {
- return _this.cmd("wrapperNotification", ["error", "Content.json write error: " + res]);
- }
- });
- };
- })(this));
- };
-
- ZeroBlog.prototype.writePublish = function(inner_path, data, cb) {
- return this.cmd("fileWrite", [inner_path, data], (function(_this) {
- return function(res) {
- if (res !== "ok") {
- _this.cmd("wrapperNotification", ["error", "File write error: " + res]);
- cb(false);
- return false;
- }
- return _this.cmd("sitePublish", {
- "inner_path": inner_path
- }, function(res) {
- if (res === "ok") {
- return cb(true);
- } else {
- return cb(res);
- }
- });
- };
- })(this));
- };
-
- ZeroBlog.prototype.onRequest = function(cmd, message) {
- if (cmd === "setSiteInfo") {
- return this.actionSetSiteInfo(message);
- } else {
- return this.log("Unknown command", message);
- }
- };
-
- ZeroBlog.prototype.actionSetSiteInfo = function(message) {
- this.setSiteinfo(message.params);
- return this.checkPublishbar();
- };
-
- ZeroBlog.prototype.setSiteinfo = function(site_info) {
- var _ref, _ref1;
- this.site_info = site_info;
- this.event_site_info.resolve(site_info);
- if ($("body").hasClass("page-post")) {
- Comments.checkCert();
- }
- if (((_ref = site_info.event) != null ? _ref[0] : void 0) === "file_done" && site_info.event[1].match(/.*users.*data.json$/)) {
- if ($("body").hasClass("page-post")) {
- Comments.loadComments();
- }
- if ($("body").hasClass("page-main")) {
- return RateLimit(500, (function(_this) {
- return function() {
- return _this.pageMain();
- };
- })(this));
- }
- } else if (((_ref1 = site_info.event) != null ? _ref1[0] : void 0) === "file_done" && site_info.event[1] === "data/data.json") {
- this.loadData();
- if ($("body").hasClass("page-main")) {
- this.pageMain();
- }
- if ($("body").hasClass("page-post")) {
- return this.pagePost();
- }
-      }
- };
-
- return ZeroBlog;
-
- })(ZeroFrame);
-
- window.Page = new ZeroBlog();
-
-}).call(this);
diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py
deleted file mode 100644
index 7e5c8bb0..00000000
--- a/src/Tor/TorManager.py
+++ /dev/null
@@ -1,308 +0,0 @@
-import logging
-import re
-import socket
-import binascii
-import sys
-import os
-import time
-import random
-import subprocess
-import atexit
-
-import gevent
-
-from Config import config
-from Crypt import CryptRsa
-from Site import SiteManager
-import socks
-from gevent.lock import RLock
-from Debug import Debug
-from Plugin import PluginManager
-
-
-@PluginManager.acceptPlugins
-class TorManager(object):
- def __init__(self, fileserver_ip=None, fileserver_port=None):
- self.privatekeys = {} # Onion: Privatekey
- self.site_onions = {} # Site address: Onion
- self.tor_exe = "tools/tor/tor.exe"
- self.has_meek_bridges = os.path.isfile("tools/tor/PluggableTransports/meek-client.exe")
- self.tor_process = None
- self.log = logging.getLogger("TorManager")
- self.start_onions = None
- self.conn = None
- self.lock = RLock()
- self.starting = True
- self.connecting = True
- self.status = None
- self.event_started = gevent.event.AsyncResult()
-
- if config.tor == "disable":
- self.enabled = False
- self.start_onions = False
- self.setStatus("Disabled")
- else:
- self.enabled = True
- self.setStatus("Waiting")
-
- if fileserver_port:
- self.fileserver_port = fileserver_port
- else:
- self.fileserver_port = config.fileserver_port
-
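-        # rsplit on the last ":" keeps any earlier colons (e.g. in an IPv6 address) in the host part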
- self.ip, self.port = config.tor_controller.rsplit(":", 1)
- self.port = int(self.port)
-
- self.proxy_ip, self.proxy_port = config.tor_proxy.rsplit(":", 1)
- self.proxy_port = int(self.proxy_port)
-
- def start(self):
- self.log.debug("Starting (Tor: %s)" % config.tor)
- self.starting = True
- try:
- if not self.connect():
- raise Exception(self.status)
- self.log.debug("Tor proxy port %s check ok" % config.tor_proxy)
- except Exception as err:
- if sys.platform.startswith("win") and os.path.isfile(self.tor_exe):
- self.log.info("Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err))
- # Change to self-bundled Tor ports
- self.port = 49051
- self.proxy_port = 49050
- if config.tor == "always":
- socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port)
- self.enabled = True
- if not self.connect():
- self.startTor()
- else:
-                self.log.info("Disabling Tor because of an error while accessing the Tor proxy at port %s: %s" % (config.tor_proxy, err))
- self.enabled = False
-
- def setStatus(self, status):
- self.status = status
- if "main" in sys.modules: # import main has side-effects, breaks tests
- import main
- if "ui_server" in dir(main):
- main.ui_server.updateWebsocket()
-
- def startTor(self):
- if sys.platform.startswith("win"):
- try:
- self.log.info("Starting Tor client %s..." % self.tor_exe)
- tor_dir = os.path.dirname(self.tor_exe)
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- cmd = r"%s -f torrc --defaults-torrc torrc-defaults --ignore-missing-torrc" % self.tor_exe
- if config.tor_use_bridges:
- cmd += " --UseBridges 1"
-
- self.tor_process = subprocess.Popen(cmd, cwd=tor_dir, close_fds=True, startupinfo=startupinfo)
- for wait in range(1, 3): # Wait for startup
- time.sleep(wait * 0.5)
- self.enabled = True
- if self.connect():
- if self.isSubprocessRunning():
-                        self.request("TAKEOWNERSHIP")  # Shut down the Tor client when the control connection is closed
- break
- # Terminate on exit
- atexit.register(self.stopTor)
- except Exception as err:
- self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err)))
- self.enabled = False
- self.starting = False
- self.event_started.set(False)
- return False
-
- def isSubprocessRunning(self):
- return self.tor_process and self.tor_process.pid and self.tor_process.poll() is None
-
- def stopTor(self):
- self.log.debug("Stopping...")
- try:
- if self.isSubprocessRunning():
- self.request("SIGNAL SHUTDOWN")
- except Exception as err:
- self.log.error("Error stopping Tor: %s" % err)
-
- def connect(self):
- if not self.enabled:
- return False
- self.site_onions = {}
- self.privatekeys = {}
-
- return self.connectController()
-
- def connectController(self):
- if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one
- conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
- else:
- conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-
- self.log.debug("Connecting to Tor Controller %s:%s" % (self.ip, self.port))
- self.connecting = True
- try:
- with self.lock:
- conn.connect((self.ip, self.port))
-
- # Auth cookie file
- res_protocol = self.send("PROTOCOLINFO", conn)
- cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol)
-
- if config.tor_password:
- res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn)
- elif cookie_match:
- cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape")
- if not os.path.isfile(cookie_file) and self.tor_process:
- # Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57)
- cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie"
- auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read())
- res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn)
- else:
- res_auth = self.send("AUTHENTICATE", conn)
-
- if "250 OK" not in res_auth:
- raise Exception("Authenticate error %s" % res_auth)
-
- # Version 0.2.7.5 required because of ADD_ONION support
- res_version = self.send("GETINFO version", conn)
- version = re.search(r'version=([0-9\.]+)', res_version).group(1)
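- # Crude version compare: replacing the first two dots turns e.g. "0.2.7.5" into "00207.5", so float() gives 207.5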
- if float(version.replace(".", "0", 2)) < 207.5:
- raise Exception("Tor version >=0.2.7.5 required, found: %s" % version)
-
- self.setStatus("Connected (%s)" % res_auth)
- self.event_started.set(True)
- self.starting = False
- self.connecting = False
- self.conn = conn
- except Exception as err:
- self.conn = None
- self.setStatus("Error (%s)" % str(err))
- self.log.warning("Tor controller connect error: %s" % Debug.formatException(str(err)))
- self.enabled = False
- return self.conn
-
- def disconnect(self):
- if self.conn:
- self.conn.close()
- self.conn = None
-
- def startOnions(self):
- if self.enabled:
- self.log.debug("Start onions")
- self.start_onions = True
- self.getOnion("global")
-
- # Get a new exit node IP
- def resetCircuits(self):
- res = self.request("SIGNAL NEWNYM")
- if "250 OK" not in res:
- self.setStatus("Reset circuits error (%s)" % res)
- self.log.error("Tor reset circuits error: %s" % res)
-
- def addOnion(self):
- if len(self.privatekeys) >= config.tor_hs_limit:
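- # Hidden service limit reached: reuse a random already-registered onion (excluding the shared "global" one)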
- return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")])
-
- result = self.makeOnionAndKey()
- if result:
- onion_address, onion_privatekey = result
- self.privatekeys[onion_address] = onion_privatekey
- self.setStatus("OK (%s onions running)" % len(self.privatekeys))
- SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port))
- return onion_address
- else:
- return False
-
- def makeOnionAndKey(self):
- res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port)
- match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL)
- if match:
- onion_address, onion_privatekey = match.groups()
- return (onion_address, onion_privatekey)
- else:
- self.setStatus("AddOnion error (%s)" % res)
- self.log.error("Tor addOnion error: %s" % res)
- return False
-
- def delOnion(self, address):
- res = self.request("DEL_ONION %s" % address)
- if "250 OK" in res:
- del self.privatekeys[address]
- self.setStatus("OK (%s onion running)" % len(self.privatekeys))
- return True
- else:
- self.setStatus("DelOnion error (%s)" % res)
- self.log.error("Tor delOnion error: %s" % res)
- self.disconnect()
- return False
-
- def request(self, cmd):
- with self.lock:
- if not self.enabled:
- return False
- if not self.conn:
- if not self.connect():
- return ""
- return self.send(cmd)
-
- def send(self, cmd, conn=None):
- if not conn:
- conn = self.conn
- self.log.debug("> %s" % cmd)
- back = ""
- for retry in range(2):
- try:
- conn.sendall(b"%s\r\n" % cmd.encode("utf8"))
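- # Control-port replies are terminated by a "250 OK" status line; keep reading until it arrives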
- while not back.endswith("250 OK\r\n"):
- back += conn.recv(1024 * 64).decode("utf8")
- break
- except Exception as err:
- self.log.error("Tor send error: %s, reconnecting..." % err)
- if not self.connecting:
- self.disconnect()
- time.sleep(1)
- self.connect()
- conn = self.conn # Retry on the freshly reopened control connection
- back = "" # Discard any partial reply instead of poisoning the next attempt with None
- if back:
- self.log.debug("< %s" % back.strip())
- return back
-
- def getPrivatekey(self, address):
- return self.privatekeys[address]
-
- def getPublickey(self, address):
- return CryptRsa.privatekeyToPublickey(self.privatekeys[address])
-
- def getOnion(self, site_address):
- if not self.enabled:
- return None
-
- if config.tor == "always": # Different onion for every site
- onion = self.site_onions.get(site_address)
- else: # Same onion for every site
- onion = self.site_onions.get("global")
- site_address = "global"
-
- if not onion:
- with self.lock:
- self.site_onions[site_address] = self.addOnion()
- onion = self.site_onions[site_address]
- self.log.debug("Created new hidden service for %s: %s" % (site_address, onion))
-
- return onion
-
- # Creates and returns a socket that connects through the Tor network
- def createSocket(self, onion, port):
- if not self.enabled:
- return False
- self.log.debug("Creating new Tor socket to %s:%s" % (onion, port))
- if self.starting:
- self.log.debug("Waiting for startup...")
- self.event_started.get()
- if config.tor == "always": # Every socket is proxied by default, in this mode
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- else:
- sock = socks.socksocket()
- sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port)
- return sock
diff --git a/src/Tor/__init__.py b/src/Tor/__init__.py
deleted file mode 100644
index d0fcffaf..00000000
--- a/src/Tor/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .TorManager import TorManager
\ No newline at end of file
diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py
deleted file mode 100644
index e73f9be1..00000000
--- a/src/Translate/Translate.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import os
-import json
-import logging
-import inspect
-import re
-import html
-import string
-
-from Config import config
-
-translates = []
-
-
-class EscapeProxy(dict):
- # Automatically escape the accessed string values
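- # e.g. EscapeProxy({"title": "<b>Hi</b>"})["title"] returns "&lt;b&gt;Hi&lt;/b&gt;" (illustrative)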
- def __getitem__(self, key):
- val = dict.__getitem__(self, key)
- if type(val) is str:
- return html.escape(val)
- elif type(val) is dict:
- return EscapeProxy(val)
- elif type(val) is list:
- return EscapeProxy(enumerate(val)) # Convert lists to dict
- else:
- return val
-
-
-class Translate(dict):
- def __init__(self, lang_dir=None, lang=None):
- if not lang_dir:
- lang_dir = os.path.dirname(__file__) + "/languages/"
- if not lang:
- lang = config.language
- self.lang = lang
- self.lang_dir = lang_dir
- self.setLanguage(lang)
- self.formatter = string.Formatter()
-
- if config.debug:
- # Auto reload the translate file on change
- from Debug import DebugReloader
- DebugReloader.watcher.addCallback(self.load)
-
- translates.append(self)
-
- def setLanguage(self, lang):
- self.lang = re.sub("[^a-z-]", "", lang)
- self.lang_file = self.lang_dir + "%s.json" % self.lang # Use the sanitized language code for the path
- self.load()
-
- def __repr__(self):
- return "" % self.lang
-
- def load(self):
- if self.lang == "en":
- data = {}
- dict.__init__(self, data)
- self.clear()
- elif os.path.isfile(self.lang_file):
- try:
- data = json.load(open(self.lang_file, encoding="utf8"))
- logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data)))
- except Exception as err:
- logging.error("Error loading translate file %s: %s" % (self.lang_file, err))
- data = {}
- dict.__init__(self, data)
- else:
- data = {}
- dict.__init__(self, data)
- self.clear()
- logging.debug("Translate file not exists: %s" % self.lang_file)
-
- def format(self, s, kwargs, nested=False):
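- # nested=True formats twice, so placeholders inside already-substituted values (e.g. "{_[Sign]}") are resolved too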
- kwargs["_"] = self
- if nested:
- back = self.formatter.vformat(s, [], kwargs) # PY3 TODO: Change to format_map
- return self.formatter.vformat(back, [], kwargs)
- else:
- return self.formatter.vformat(s, [], kwargs)
-
- def formatLocals(self, s, nested=False):
- kwargs = inspect.currentframe().f_back.f_locals
- return self.format(s, kwargs, nested=nested)
-
- def __call__(self, s, kwargs=None, nested=False, escape=True):
- if not kwargs:
- kwargs = inspect.currentframe().f_back.f_locals
- if escape:
- kwargs = EscapeProxy(kwargs)
- return self.format(s, kwargs, nested=nested)
-
- def __missing__(self, key):
- return key
-
- def pluralize(self, value, single, multi):
- if value > 1:
- return self[multi].format(value)
- else:
- return self[single].format(value)
-
- def translateData(self, data, translate_table=None, mode="js"):
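- # mode="js" rewrites whole double-quoted string literals; mode="html" also matches text between tag delimiters ([">] and ["<])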
- if not translate_table:
- translate_table = self
-
- patterns = []
- for key, val in list(translate_table.items()):
- if key.startswith("_("): # Problematic string: only match if called between _(" ") function
- key = key.replace("_(", "").replace(")", "").replace(", ", '", "')
- translate_table[key] = "|" + val
- patterns.append(re.escape(key))
-
- def replacer(match):
- target = translate_table[match.group(1)]
- if mode == "js":
- if target and target[0] == "|": # Strict string match
- if match.string[match.start() - 2] == "_": # Only if the match appears inside a _("...") call
- return '"' + target[1:] + '"'
- else:
- return '"' + match.group(1) + '"'
- return '"' + target + '"'
- else:
- return match.group(0)[0] + target + match.group(0)[-1]
-
- if mode == "html":
- pattern = '[">](' + "|".join(patterns) + ')["<]'
- else:
- pattern = '"(' + "|".join(patterns) + ')"'
- data = re.sub(pattern, replacer, data)
-
- if mode == "html":
- data = data.replace("lang={lang}", "lang=%s" % self.lang) # lang get parameter to .js file to avoid cache
-
- return data
-
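-# Module-level singleton. A sketch of typical use (import style assumed, not verified here):
-#   from Translate import translate as _
-#   _["Error"]          -> translated value, or the key itself via __missing__
-#   _("Hello {name}")   -> formatted using the caller's local variables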
-translate = Translate()
diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py
deleted file mode 100644
index ba0ab6d4..00000000
--- a/src/Translate/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .Translate import *
\ No newline at end of file
diff --git a/src/Translate/languages/da.json b/src/Translate/languages/da.json
deleted file mode 100644
index 8e6f0845..00000000
--- a/src/Translate/languages/da.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Tillykke, din port ({0} ) er åben. Du er nu fuld klient på ZeroNet!",
- "Tor mode active, every connection using Onion route.": "TOR er aktiv, alle forbindelser anvender Onions.",
- "Successfully started Tor onion hidden services.": "OK. Startede TOR skjult onion service.",
- "Unable to start hidden services, please check your config.": "Fejl. Kunne ikke starte TOR skjult onion service. Tjek din opsætning!",
- "For faster connections open {0} port on your router.": "Åben port {0} på din router for hurtigere forbindelse.",
- "Your connection is restricted. Please, open {0} port on your router": "Begrænset forbindelse. Åben venligst port {0} på din router",
- "or configure Tor to become a full member of the ZeroNet network.": "eller opsæt TOR for fuld adgang til ZeroNet!",
-
- "Select account you want to use in this site:": "Vælg bruger til brug på denne side:",
- "currently selected": "nuværende bruger",
- "Unique to site": "Unik på siden",
-
- "Content signing failed": "Signering af indhold fejlede",
- "Content publish queued for {0:.0f} seconds.": "Indhold i kø for offentliggørelse i {0:.0f} sekunder.",
- "Content published to {0} peers.": "Indhold offentliggjort til {0} klienter.",
- "No peers found, but your content is ready to access.": "Ingen klienter fundet, men dit indhold er klar til hentning.",
- "Your network connection is restricted. Please, open {0} port": "Din forbindelse er begrænset. Åben venligst port {0} ",
- "on your router to make your site accessible for everyone.": "på din router for at dele din side med alle.",
- "Content publish failed.": "Offentliggørelse af indhold fejlede.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Denne fil er endnu ikke delt færdig. Tidligere indhold kan gå tabt hvis du skriver til filen nu.",
- "Write content anyway": "Del indhold alligevel",
- "New certificate added:": "Nyt certifikat oprettet:",
- "You current certificate:": "Dit nuværende certifikat: ",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Skift certificat til {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certifikat ændret til {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Side klonet",
-
- "You have successfully changed the web interface's language!": "OK. Du har nu skiftet sprog på web brugergrænsefladen!",
- "Due to the browser's caching, the full transformation could take some minute.": "Pga. browser cache kan skift af sprog tage nogle minutter.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Forbindelse til UiServer Websocket blev tabt. Genopretter forbindelse...",
- "Connection with UiServer Websocket recovered.": "Forbindelse til UiServer Websocket genoprettet.",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket fejl. Genindlæs venligst siden (F5)!",
- " Connecting...": " Opretter forbindelse...",
- "Site size: ": "Side størrelse: ",
- "MB is larger than default allowed ": "MB er større end den tilladte default ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Åben side og sæt max side størrelse til \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " filer skal downloades",
- " downloaded": " downloadet",
- " download failed": " download fejlede",
- "Peers found: ": "Klienter fundet: ",
- "No peers found": "Ingen klienter fundet",
- "Running out of size limit (": "Siden fylder snart for meget (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Ret max side størrelse til \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Max side størrelse ændret til {0}MB",
- " New version of this page has just released. Reload to see the modified content.": " Ny version af denne side er blevet offentliggjort. Genindlæs venligst siden (F5) for at se nyt indhold!",
- "This site requests permission:": "Denne side betyder om tilladdelse:",
- "_(Accept)": "Tillad"
-
-}
diff --git a/src/Translate/languages/de.json b/src/Translate/languages/de.json
deleted file mode 100644
index 1cc63b74..00000000
--- a/src/Translate/languages/de.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Gratulation, dein Port {0} ist offen. Du bist ein volles Mitglied des ZeroNet Netzwerks!",
- "Tor mode active, every connection using Onion route.": "Tor Modus aktiv, jede Verbindung nutzt die Onion Route.",
- "Successfully started Tor onion hidden services.": "Tor versteckte Dienste erfolgreich gestartet.",
- "Unable to start hidden services, please check your config.": "Nicht möglich versteckte Dienste zu starten.",
- "For faster connections open {0} port on your router.": "Für schnellere Verbindungen, öffne Port {0} auf deinem Router.",
- "Your connection is restricted. Please, open {0} port on your router": "Deine Verbindung ist eingeschränkt. Bitte öffne Port {0} auf deinem Router",
- "or configure Tor to become a full member of the ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.",
-
- "Select account you want to use in this site:": "Wähle das Konto, das du auf dieser Seite benutzen willst:",
- "currently selected": "aktuell ausgewählt",
- "Unique to site": "Eindeutig zur Seite",
-
- "Content signing failed": "Signierung des Inhalts fehlgeschlagen",
- "Content publish queued for {0:.0f} seconds.": "Veröffentlichung des Inhalts um {0:.0f} Sekunden verzögert.",
- "Content published to {0} peers.": "Inhalt zu {0} Peers veröffentlicht.",
- "No peers found, but your content is ready to access.": "Keine Peers gefunden, aber dein Inhalt ist bereit zum Zugriff.",
- "Your network connection is restricted. Please, open {0} port": "Deine Netzwerkverbindung ist beschränkt. Bitte öffne Port {0} ",
- "on your router to make your site accessible for everyone.": "auf deinem Router um deine Seite für Jeden zugänglich zu machen.",
- "Content publish failed.": "Inhalt konnte nicht veröffentlicht werden.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei wird noch synchronisiert. Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.",
- "Write content anyway": "Inhalt trotzdem schreiben",
- "New certificate added:": "Neues Zertifikat hinzugefügt:",
- "You current certificate:": "Dein aktuelles Zertifikat:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Ändere es zu {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Zertifikat geändert zu: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Seite geklont",
-
- "You have successfully changed the web interface's language!": "Du hast die Sprache des Webinterface erfolgreich geändert!",
- "Due to the browser's caching, the full transformation could take some minute.": "Aufgrund des Browsercaches kann die volle Transformation Minuten dauern.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Die Verbindung mit UiServer Websocket ist abgebrochen. Neu verbinden...",
- "Connection with UiServer Websocket recovered.": "Die Verbindung mit UiServer Websocket wurde wiederhergestellt.",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket Fehler, bitte Seite neu laden.",
- " Connecting...": " Verbinden...",
- "Site size: ": "Seitengröße: ",
- "MB is larger than default allowed ": "MB ist größer als der erlaubte Standart",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das Limit auf \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " Dateien müssen noch heruntergeladen werden",
- " downloaded": " heruntergeladen",
- " download failed": " Herunterladen fehlgeschlagen",
- "Peers found: ": "Peers gefunden: ",
- "No peers found": "Keine Peers gefunden",
- "Running out of size limit (": "Das Speicherlimit ist bald ausgeschöpft (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Limit auf \" + site_info.next_size_limit + \"MB ändern",
- "Site size limit changed to {0}MB": "Speicherlimit für diese Seite auf {0}MB geändert",
- " New version of this page has just released. Reload to see the modified content.": " Neue version dieser Seite wurde gerade veröffentlicht. Lade die Seite neu um den geänderten Inhalt zu sehen.",
- "This site requests permission:": "Diese Seite fordert rechte:",
- "_(Accept)": "Genehmigen"
-
-}
diff --git a/src/Translate/languages/es.json b/src/Translate/languages/es.json
deleted file mode 100644
index 4cac077b..00000000
--- a/src/Translate/languages/es.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "¡Felicidades! tu puerto {0} está abierto. ¡Eres un miembro completo de la red Zeronet!",
- "Tor mode active, every connection using Onion route.": "Modo Tor activado, cada conexión usa una ruta Onion.",
- "Successfully started Tor onion hidden services.": "Tor ha iniciado satisfactoriamente la ocultación de los servicios onion.",
- "Unable to start hidden services, please check your config.": "No se puedo iniciar los servicios ocultos, por favor comprueba tu configuración.",
- "For faster connections open {0} port on your router.": "Para conexiones más rápidas abre el puerto {0} en tu router.",
- "Your connection is restricted. Please, open {0} port on your router": "Tu conexión está limitada. Por favor, abre el puerto {0} en tu router",
- "or configure Tor to become a full member of the ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.",
-
- "Select account you want to use in this site:": "Selecciona la cuenta que quieres utilizar en este sitio:",
- "currently selected": "actualmente seleccionada",
- "Unique to site": "Única para el sitio",
-
- "Content signing failed": "Firma del contenido fallida",
- "Content publish queued for {0:.0f} seconds.": "Publicación de contenido en cola durante {0:.0f} segundos.",
- "Content published to {0} peers.": "Contenido publicado para {0} pares.",
- "No peers found, but your content is ready to access.": "No se ha encontrado pares, pero tu contenido está listo para ser accedido.",
- "Your network connection is restricted. Please, open {0} port": "Tu conexión de red está restringida. Por favor, abre el puerto{0} ",
- "on your router to make your site accessible for everyone.": "en tu router para hacer tu sitio accesible a todo el mundo.",
- "Content publish failed.": "Publicación de contenido fallida.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Este archivo está aún sincronizado, si le escribes ahora el contenido previo podría perderse.",
- "Write content anyway": "Escribir el contenido de todas formas",
- "New certificate added:": "Nuevo certificado añadido:",
- "You current certificate:": "Tu certificado actual:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambia esto a {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certificado cambiado a: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Sitio clonado",
-
- "You have successfully changed the web interface's language!": "¡Has cambiado con éxito el idioma de la interfaz web!",
- "Due to the browser's caching, the full transformation could take some minute.": "Debido a la caché del navegador, la transformación completa podría llevar unos minutos.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Se perdió la conexión con UiServer Websocket . Reconectando...",
- "Connection with UiServer Websocket recovered.": "Conexión con UiServer Websocket recuperada.",
- "UiServer Websocket error, please reload the page.": "Error de UiServer Websocket, por favor recarga la página.",
- " Connecting...": " Conectando...",
- "Site size: ": "Tamaño del sitio: ",
- "MB is larger than default allowed ": "MB es más grande de lo permitido por defecto",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abre tu sitio and establece el límite de tamaño a \" + site_info.next_size_limit + \"MBs",
- " files needs to be downloaded": " Los archivos necesitan ser descargados",
- " downloaded": " descargados",
- " download failed": " descarga fallida",
- "Peers found: ": "Pares encontrados: ",
- "No peers found": "No se han encontrado pares",
- "Running out of size limit (": "Superando el tamaño límite (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Establece ellímite a \" + site_info.next_size_limit + \"MB ändern",
- "Site size limit changed to {0}MB": "Límite de tamaño del sitio cambiado a {0}MBs",
- " New version of this page has just released. Reload to see the modified content.": " Se ha publicado una nueva versión de esta página . Recarga para ver el contenido modificado.",
- "This site requests permission:": "Este sitio solicita permiso:",
- "_(Accept)": "Conceder"
-
-}
diff --git a/src/Translate/languages/fa.json b/src/Translate/languages/fa.json
deleted file mode 100644
index e644247a..00000000
--- a/src/Translate/languages/fa.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "تبریک، درگاه {0} شما باز شده است. شما یک عضو تمام شبکه ZeroNet هستید!",
- "Tor mode active, every connection using Onion route.": "حالت Tor فعال است، هر ارتباط از مسیریابی پیاز (Onion) استفاده میکند.",
- "Successfully started Tor onion hidden services.": "خدمات پنهان پیاز (Onion) Tor با موفقیت راهاندازی شد.",
- "Unable to start hidden services, please check your config.": "قادر به راهاندازی خدمات پنهان نیستیم، لطفا تنظیمات خود را بررسی نمایید.",
- "For faster connections open {0} port on your router.": "برای ارتباطات سریعتر درگاه {0} را بر روی مسیریاب (روتر) خود باز نمایید.",
- "Your connection is restricted. Please, open {0} port on your router": "ارتباط شما محدودشده است. لطفا درگاه {0} را در مسیریاب (روتر) خود باز نمایید",
- "or configure Tor to become a full member of the ZeroNet network.": "یا پیکربندی Tor را انجام دهید تا به یک عضو تمام شبکه ZeroNet تبدیل شوید.",
-
- "Select account you want to use in this site:": "حسابی را که میخواهید در این سایت استفاده کنید، انتخاب کنید:",
- "currently selected": "در حال حاضر انتخابشده",
- "Unique to site": "مختص به سایت",
-
- "Content signing failed": "امضای محتوا با شکست مواجه شد",
- "Content publish queued for {0:.0f} seconds.": "محتوا در صف انتشار با {0:.0f} ثانیه تاخیر قرار گرفت.",
- "Content published to {0} peers.": "محتوا برای {0} تعداد همتا انتشار یافت.",
- "No peers found, but your content is ready to access.": "همتایی یافت نشد، اما محتوای شما آماده دسترسی است.",
- "Your network connection is restricted. Please, open {0} port": "ارتباط شبکه شما محدودشده است. لطفا درگاه {0} را",
- "on your router to make your site accessible for everyone.": "در مسیریاب (روتر) خود باز کنید تا سایت خود را برای همه در دسترس قرار دهید.",
- "Content publish failed.": "انتشار محتوا موفق نبود.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "این فایل همچنان همگام است، اگز شما آن را بنویسید، ممکن است محتوای قبلی ازبین رود.",
- "Write content anyway": "در هر صورت محتوا را بنویس",
- "New certificate added:": "گواهی جدیدی افزوده شد:",
- "You current certificate:": "گواهی فعلی شما:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "تغییرش بده به {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "گواهینامه به: {auth_type}/{auth_user_name}@{domain} تغییر پیدا کرد.",
- "Site cloned": "سایت همسانسازی شد",
-
- "You have successfully changed the web interface's language!": "شما با موفقیت زبان رابط وب را تغییر دادید!",
- "Due to the browser's caching, the full transformation could take some minute.": "به دلیل ذخیرهسازی در مرورگر، امکان دارد تغییر شکل کامل چند دقیقه طول بکشد.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "اتصال با UiServer Websocket قطع شد. اتصال دوباره...",
- "Connection with UiServer Websocket recovered.": "ارتباط با UiServer Websocket دوباره برقرار شد.",
- "UiServer Websocket error, please reload the page.": "خطای UiServer Websocket, لطفا صفحه را دوباره بارگیری کنید.",
- " Connecting...": " برقراری ارتباط...",
- "Site size: ": "حجم سایت: ",
- "MB is larger than default allowed ": "MB بیشتر از پیشفرض مجاز است ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "سایت را باز کرده و محدوده حجم را به \" + site_info.next_size_limit + \"MB تنظیم کن",
- " files needs to be downloaded": " فایلهایی که نیاز است، دانلود شوند",
- " downloaded": " دانلود شد",
- " download failed": " دانلود موفق نبود",
- "Peers found: ": "چند همتا یافت شد: ",
- "No peers found": "همتایی یافت نشد",
- "Running out of size limit (": "عبور کرده از محدوده حجم (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "محدوده را به \" + site_info.next_size_limit + \"MB تنظیم کن",
- "Site size limit changed to {0}MB": "محدوده حجم سایت به {0}MB تغییر کرد",
- " New version of this page has just released. Reload to see the modified content.": " نسخه جدیدی از این صفحه منتشر شده است. برای مشاهده محتوای تغییریافته دوباره بارگیری نمایید.",
- "This site requests permission:": "این سایت درخواست مجوز میکند:",
- "_(Accept)": "_(پذیرفتن)"
-}
diff --git a/src/Translate/languages/fr.json b/src/Translate/languages/fr.json
deleted file mode 100644
index b46ef2c3..00000000
--- a/src/Translate/languages/fr.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Félicitations, le port ({0} ) est ouvert. Vous êtes maintenant membre de ZeroNet!!",
- "Tor mode active, every connection using Onion route.": "Tor actif, toutes les connexions utilisent un routage Onion.",
- "Successfully started Tor onion hidden services.": "Tor activé avec succès.",
- "Unable to start hidden services, please check your config.": "Impossible d'activer Tor, veuillez vérifier votre configuration.",
- "For faster connections open {0} port on your router.": "Pour une meilleure connectivité, ouvrez le port {0} sur votre routeur.",
- "Your connection is restricted. Please, open {0} port on your router": "Connectivité limitée. Veuillez ouvrir le port {0} sur votre routeur",
- "or configure Tor to become a full member of the ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.",
-
- "Select account you want to use in this site:": "Sélectionnez le compte que vous voulez utiliser pour ce site:",
- "currently selected": "présentement sélectionné",
- "Unique to site": "Unique au site",
-
- "Content signing failed": "Échec à la signature du contenu",
- "Content publish queued for {0:.0f} seconds.": "Publication du contenu différée {0:.0f} secondes.",
- "Content published to {0} peers.": "Contenu publié à {0} pairs.",
- "No peers found, but your content is ready to access.": "Aucun pair trouvé, mais votre contenu est accessible.",
- "Your network connection is restricted. Please, open {0} port": "Connectivité limitée. Veuillez ouvrir le port {0} ",
- "on your router to make your site accessible for everyone.": "sur votre routeur pour que votre site soit accessible à tous.",
- "Content publish failed.": "Échec de la publication du contenu.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Ce fichier n'est pas à jour, si vous le modifiez maintenant une version antérieure pourrait être perdue.",
- "Write content anyway": "Enregistrer quand même",
- "New certificate added:": "Nouveau cetificat ajouté :",
- "You current certificate:": "Votre certificat actuel :",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Changer pour {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certificat changé pour : {auth_type}/{auth_user_name}@{domain} -ra.",
- "Site cloned": "Site cloné",
-
- "You have successfully changed the web interface's language!": "Vous avez modifié la langue d'affichage avec succès!",
- "Due to the browser's caching, the full transformation could take some minute.": "En fonction du cache du navigateur, la modification pourrait prendre quelques minutes.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Connexion avec UiServer Websocket rompue. Reconnexion...",
- "Connection with UiServer Websocket recovered.": "Connexion avec UiServer Websocket rétablie.",
- "UiServer Websocket error, please reload the page.": "Erreur du UiServer Websocket, veuillez recharger la page.",
- " Connecting...": " Connexion...",
- "Site size: ": "Taille du site : ",
- "MB is larger than default allowed ": "MB est plus large que la taille permise par défaut ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Ouvrez le site et augmentez la taille maximale à \" + site_info.next_size_limit + \"MB-ra",
- " files needs to be downloaded": " fichiers doivent être téléchargés",
- " downloaded": " téléchargés",
- " download failed": " échec de téléchargement",
- "Peers found: ": "Pairs trouvés: ",
- "No peers found": "Aucun pair trouvé",
- "Running out of size limit (": "Vous approchez la taille maximale (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Augmentez la taille maximale à \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Taille maximale du site changée à {0}MB",
- " New version of this page has just released. Reload to see the modified content.": " Une nouvelle version de cette page vient d'être publiée. Rechargez pour voir les modifications.",
- "This site requests permission:": "Ce site requiert une permission :",
- "_(Accept)": "Autoriser"
-
-}
diff --git a/src/Translate/languages/hu.json b/src/Translate/languages/hu.json
deleted file mode 100644
index f9487f1d..00000000
--- a/src/Translate/languages/hu.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Gratulálunk, a portod ({0} ) nyitva van. Teljes értékű tagja vagy a hálózatnak!",
- "Tor mode active, every connection using Onion route.": "Tor mód aktív, minden kapcsolat az Onion hálózaton keresztül történik.",
- "Successfully started Tor onion hidden services.": "Sikeresen elindultak a Tor onion titkos szolgáltatások.",
- "Unable to start hidden services, please check your config.": "Nem sikerült elindítani a Tor onion szolgáltatásokat. Kérjük, ellenőrizd a beállításokat!",
- "For faster connections open {0} port on your router.": "A gyorsabb kapcsolatok érdekében nyisd ki a {0} portot a routereden.",
- "Your connection is restricted. Please, open {0} port on your router": "A kapcsolatod korlátozott. Kérjük, nyisd ki a {0} portot a routereden",
- "or configure Tor to become a full member of the ZeroNet network.": "vagy állítsd be a Tor kliensed, hogy teljes értékű tagja legyél a hálózatnak!",
-
- "Select account you want to use in this site:": "Válaszd ki az oldalhoz használt felhasználónevet:",
- "currently selected": "jelenleg kijelölt",
- "Unique to site": "Egyedi az oldalon",
-
- "Content signing failed": "Tartalom aláírása sikeretelen",
- "Content publish queued for {0:.0f} seconds.": "Tartalom publikálása elhalasztva {0:.0f} másodperccel.",
- "Content published to {0} peers.": "Tartalom publikálva {0} fél részére.",
- "No peers found, but your content is ready to access.": "Aktív csatlakozási pont nem található, de a tartalmad készen áll a kiszolgálásra.",
- "Your network connection is restricted. Please, open {0} port": "A kapcsolatod korlátozott. Kérjük, nyisd ki a {0} portot",
- "on your router to make your site accessible for everyone.": "a routereden, hogy az oldalad mindenki számára elérhető legyen.",
- "Content publish failed.": "Sikertelen tartalom publikálás.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Ez a fájl még letöltés alatt van, ha most felülírod a korábbi tartalma elveszhet.",
- "Write content anyway": "Felülírás",
- "New certificate added:": "Új tanúsítvány hozzáadva:",
- "You current certificate:": "A jelenlegi tanúsítványod: ",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Változtatás {auth_type}/{auth_user_name}@{domain}-ra",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "A tanúsítvány megváltozott {auth_type}/{auth_user_name}@{domain} -ra.",
- "Site cloned": "Az oldal klónozva",
-
- "You have successfully changed the web interface's language!": "Sikeresen átállítottad a web felület nyelvét!",
- "Due to the browser's caching, the full transformation could take some minute.": "A böngésző cache-elése miatt egy pár percig eltarthat a teljes átállás.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Az UiServer Websocket kapcsolat megszakadt. Újracsatlakozás...",
- "Connection with UiServer Websocket recovered.": "Az UiServer Websocket kapcsolat visszaállt.",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket hiba, töltsd újra az oldalt!",
- " Connecting...": " Csatlakozás...",
- "Site size: ": "Oldal mérete: ",
- "MB is larger than default allowed ": "MB nagyobb, mint az engedélyezett ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Az oldal megnyitása és a korlát módosítása \" + site_info.next_size_limit + \"MB-ra",
- " files needs to be downloaded": " fájlt kell letölteni",
- " downloaded": " letöltve",
- " download failed": " letöltés sikertelen",
- "Peers found: ": "Talált csatlakozási pontok: ",
- "No peers found": "Nincs csatlakozási pont",
- "Running out of size limit (": "Az oldal hamarosan eléri a méretkorlátot (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "A korlát módosítása \" + site_info.next_size_limit + \"MB-ra",
- "Site size limit changed to {0}MB": "A méretkorlát módosítva {0}MB-ra",
- " New version of this page has just released. Reload to see the modified content.": "Az oldal épp most módosult A megváltozott tartalomért töltsd újra!",
- "This site requests permission:": "Az oldal megtekintéséhez szükséges jog:",
- "_(Accept)": "Engedélyezés"
-
-}
diff --git a/src/Translate/languages/it.json b/src/Translate/languages/it.json
deleted file mode 100644
index 47992328..00000000
--- a/src/Translate/languages/it.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Congratulazioni, la tua porta {0} è aperta. Ora sei un membro effettivo della rete ZeroNet!",
- "Tor mode active, every connection using Onion route.": "Modalità Tor attiva, ogni connessione sta usando la rete Onion.",
- "Successfully started Tor onion hidden services.": "Servizi Tor onion nascosti avviati con successo.",
- "Unable to start hidden services, please check your config.": "Impossibile avviare i servizi nascosti. Si prega di controllare la propria configurazione!",
- "For faster connections open {0} port on your router.": "Per avere connessioni più veloci aprire la porta {0} sul router.",
- "Your connection is restricted. Please, open {0} port on your router": "La tua connessione è limitata. Aprire la porta {0} sul router",
- "or configure Tor to become a full member of the ZeroNet network.": "o configurare Tor per diventare membro effettivo della rete ZeroNet!",
-
- "Select account you want to use in this site:": "Seleziona l'account che vuoi utilizzare per questo sito:",
- "currently selected": "attualmente selezionato",
- "Unique to site": "Unico sul sito",
-
- "Content signing failed": "Firma contenuti fallita",
- "Content publish queued for {0:.0f} seconds.": "Pubblicazione contenuti in coda per {0:.0f} secondi.",
- "Content published to {0} peers.": "Contenuti pubblicati su {0} peer.",
- "No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per l'accesso.",
- "Your network connection is restricted. Please, open {0} port": "La tua connessione di rete è limitata. Aprire la porta {0} ",
- "on your router to make your site accessible for everyone.": "sul router, per rendere il sito accessibile a chiunque.",
- "Content publish failed.": "Pubblicazione contenuti fallita.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se viene modificato i contenuti precedenti andranno persi.",
- "Write content anyway": "Scrivere comunque i contenuti",
- "New certificate added:": "Aggiunto nuovo certificato:",
- "You current certificate:": "Il tuo attuale certificato:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambiarlo in {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certificato cambianto in: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Sito clonato",
-
- "You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua dell'interfaccia web!",
- "Due to the browser's caching, the full transformation could take some minute.": "La trasformazione completa potrebbe richiedre alcuni minuti a causa della cache del browser.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "La connessione con UiServer Websocket è andata persa. Riconnessione...",
- "Connection with UiServer Websocket recovered.": "Connessione con UiServer Websocket recuperata.",
- "UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, ricaricare la pagina!",
- " Connecting...": " Connessione...",
- "Site size: ": "Dimensione del sito: ",
- "MB is larger than default allowed ": "MB è più grande del valore predefinito consentito ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Aprire il sito e impostare la dimensione limite a \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " i file devono essere scaricati",
- " downloaded": " scaricati",
- " download failed": " scaricamento fallito",
- "Peers found: ": "Peer trovati: ",
- "No peers found": "Nessun peer trovato",
- "Running out of size limit (": "Superato il limite di spazio (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Limite di spazio cambiato a {0}MB",
- " New version of this page has just released. Reload to see the modified content.": "E' stata rilasciata una nuova versione di questa pagina Ricaricare per vedere il contenuto modificato!",
- "This site requests permission:": "Questo sito richiede permessi:",
- "_(Accept)": "Concedere"
-
-}
diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json
deleted file mode 100644
index ff10aee4..00000000
--- a/src/Translate/languages/jp.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "おめでとうございます。ポート {0} が開きました。これでZeroNetネットワークのメンバーです。",
- "Tor mode active, every connection using Onion route.": "Torモードがアクティブです、全ての接続はOnionルートを使用します。",
- "Successfully started Tor onion hidden services.": "Tor onionサービスを正常に開始しました。",
- "Unable to start hidden services, please check your config.": "非表示のサービスを開始できません。設定を確認してください。",
- "For faster connections open {0} port on your router.": "接続を高速化するにはルーターのポート {0} を開けてください。",
- "Your connection is restricted. Please, open {0} port on your router": "接続が制限されています。ルーターのポート {0} を開けてください。",
- "or configure Tor to become a full member of the ZeroNet network.": "または、TorをZeroNetネットワークのメンバーになるように設定してください。",
-
- "Select account you want to use in this site:": "このサイトで使用するアカウントを選択:",
- "No certificate": "証明書がありません",
- "currently selected": "現在選択中",
- "Unique to site": "サイト固有",
-
- "Content signing failed": "コンテンツの署名に失敗",
- "Content publish queued for {0:.0f} seconds.": "コンテンツの公開は{0:.0f}秒のキューに入れられました。",
- "Content published to {0}/{1} peers.": "サイトの更新を通知済 {0}/{1} ピア",
- "Content published to {0} peers.": "{0}ピアに公開されたコンテンツ。",
- "No peers found, but your content is ready to access.": "ピアは見つかりませんでしたが、コンテンツにアクセスする準備ができました。",
- "Your network connection is restricted. Please, open {0} port": "ネットワーク接続が制限されています。ポート {0} を開いて、",
- "on your router to make your site accessible for everyone.": "誰でもサイトにアクセスできるようにしてください。",
- "Content publish failed.": "コンテンツの公開に失敗しました。",
- "This file still in sync, if you write it now, then the previous content may be lost.": "このファイルはまだ同期しています。今すぐ書き込むと、前のコンテンツが失われる可能性があります。",
- "Write content anyway": "とにかくコンテンツを書く",
- "New certificate added:": "新しい証明書が追加されました:",
- "You current certificate:": "現在の証明書:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} に変更",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "変更後の証明書: {auth_type}/{auth_user_name}@{domain} ",
- "Site cloned": "複製されたサイト",
-
- "You have successfully changed the web interface's language!": "Webインターフェースの言語が正常に変更されました!",
- "Due to the browser's caching, the full transformation could take some minute.": "ブラウザのキャッシュにより、完全な変換には数分かかる場合があります。",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket との接続が失われました。再接続しています...",
- "Connection with UiServer Websocket recovered.": "UiServer Websocket との接続が回復しました。",
- "UiServer Websocket error, please reload the page.": "UiServer Websocketエラー、ページをリロードしてください。",
- " Connecting...": " 接続しています...",
- "Site size: ": "サイトサイズ: ",
- "MB is larger than default allowed ": "MB はデフォルトの許容値よりも大きいです。 ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "サイトを開き、サイズ制限を \" + site_info.next_size_limit + \"MB に設定",
- " files needs to be downloaded": " ファイルをダウンロードする必要があります",
- " downloaded": " ダウンロード",
- " download failed": " ダウンロード失敗",
- "Peers found: ": "ピアが見つかりました: ",
- "No peers found": "ピアが見つかりません",
- "Running out of size limit (": "サイズ制限を使い果たしました (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "制限を \" + site_info.next_size_limit + \"MB に設定",
- "Cloning site...": "サイトを複製中…",
- "Site size limit changed to {0}MB": "サイトのサイズ制限が {0}MB に変更されました",
- " New version of this page has just released. Reload to see the modified content.": " このページの新しいバージョンが公開されました。 変更されたコンテンツを見るには再読み込みしてください。",
- "This site requests permission:": "このサイトは権限を要求しています:",
- "_(Accept)": "_(許可)",
-
- "Save": "保存",
- "Trackers announcing": "トラッカーをお知らせ",
- "Error": "エラー",
- "Done": "完了",
- "Tracker connection error detected.": "トラッカー接続エラーが検出されました。",
-
- "Update ZeroNet client to latest version?": "ZeroNetクライアント を最新版に更新しますか?",
- "Update": "更新",
- "Restart ZeroNet client ?": "ZeroNetクライアントを再起動しますか?",
- "Restart": "再起動",
- "Shut down ZeroNet client ?": "ZeroNetクライアント を終了しますか?",
- "Shut down": "終了"
-}
diff --git a/src/Translate/languages/nl.json b/src/Translate/languages/nl.json
deleted file mode 100644
index 985cce7a..00000000
--- a/src/Translate/languages/nl.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Gefeliciteerd, je poort {0} is geopend. Je bent een volledig lid van het ZeroNet netwerk!",
- "Tor mode active, every connection using Onion route.": "Tor modus actief, elke verbinding gebruikt een Onion route.",
- "Successfully started Tor onion hidden services.": "Tor onion verborgen diensten zijn met succes gestart.",
- "Unable to start hidden services, please check your config.": "Het was niet mogelijk om verborgen diensten te starten, controleer je configuratie.",
- "For faster connections open {0} port on your router.": "Voor snellere verbindingen open je de poort {0} op je router.",
- "Your connection is restricted. Please, open {0} port on your router": "Je verbinding is beperkt. Open altjeblieft poort {0} op je router",
- "or configure Tor to become a full member of the ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.",
-
- "Select account you want to use in this site:": "Selecteer het account die je wilt gebruiken binnen deze site:",
- "currently selected": "huidige selectie",
- "Unique to site": "Uniek voor deze site",
-
- "Content signing failed": "Inhoud ondertekenen mislukt",
- "Content publish queued for {0:.0f} seconds.": "Publiceren van inhoud staat in de wachtrij voor {0:.0f} seconden.",
- "Content published to {0} peers.": "Inhoud is gepubliceerd naar {0} peers",
- "No peers found, but your content is ready to access.": "Geen peers gevonden, maar je inhoud is klaar voor toegang.",
- "Your network connection is restricted. Please, open {0} port": "Je netwerkverbinding is beperkt. Open alsjeblieft poort {0} ",
- "on your router to make your site accessible for everyone.": "op je router om je site toegankelijk te maken voor iedereen.",
- "Content publish failed.": "Inhoud publicatie mislukt.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Dit bestand is nog in sync, als je het nu overschrijft, dan is mogelijk de vorige inhoud verloren.",
- "Write content anyway": "Inhoud toch schrijven",
- "New certificate added:": "Nieuw certificaat toegevoegd:",
- "You current certificate:": "Je huidige certificaat:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Verander het naar {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certificaat veranderd naar: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Site gecloned",
-
- "You have successfully changed the web interface's language!": "Je hebt met succes de taal van de web interface aangepast!",
- "Due to the browser's caching, the full transformation could take some minute.": "Door caching van je browser kan de volledige transformatie enkele minuten duren.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Verbinding met UiServer Websocket verbroken. Opnieuw verbinden...",
- "Connection with UiServer Websocket recovered.": "Verbinding met UiServer Websocket hersteld.",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket fout, herlaad alsjeblieft de pagina.",
- " Connecting...": " Verbinden...",
- "Site size: ": "Site grootte ",
- "MB is larger than default allowed ": "MB is groter dan de standaard toegestaan ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Open de site en stel de limeit op de grootte in op \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " bestanden moeten worden gedownload",
- " downloaded": " gedownload",
- " download failed": " download mislukt",
- "Peers found: ": "Peers gevonden: ",
- "No peers found": "Geen peers gevonden",
- "Running out of size limit (": "Limeit op grootte bereikt (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Stel limiet in op \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Site limiet op grootte is veranderd naar {0}MB",
- " New version of this page has just released. Reload to see the modified content.": " Een nieuwe versie van deze pagina is zojuist uitgekomen. Herlaad de pagina om de bijgewerkte inhoud te zien.",
- "This site requests permission:": "Deze site vraagt om permissie:",
- "_(Accept)": "Toekennen"
-
-}
diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json
deleted file mode 100644
index 679e909d..00000000
--- a/src/Translate/languages/pl.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Gratulacje, twój port {0} jest otwarty. Jesteś pełnoprawnym użytkownikiem sieci ZeroNet!",
- "Tor mode active, every connection using Onion route.": "Tryb Tor aktywny, każde połączenie przy użyciu trasy Cebulowej.",
- "Successfully started Tor onion hidden services.": "Pomyślnie zainicjowano ukryte usługi cebulowe Tor.",
- "Unable to start hidden services, please check your config.": "Niezdolny do uruchomienia ukrytych usług, proszę sprawdź swoją konfigurację.",
- "For faster connections open {0} port on your router.": "Dla szybszego połączenia otwórz {0} port w swoim routerze.",
- "Your connection is restricted. Please, open {0} port on your router": "Połączenie jest ograniczone. Proszę, otwórz port {0} w swoim routerze",
- "or configure Tor to become a full member of the ZeroNet network.": "bądź skonfiguruj Tora by stać się pełnoprawnym użytkownikiem sieci ZeroNet.",
-
- "Select account you want to use in this site:": "Wybierz konto którego chcesz użyć na tej stronie:",
- "currently selected": "aktualnie wybrany",
- "Unique to site": "Unikatowy dla strony",
-
- "Content signing failed": "Podpisanie treści zawiodło",
- "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).",
- "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników.",
- "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników, ale twoja treść jest dostępna.",
- "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. Proszę, otwórz port {0} ",
- "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogłabyć dostępna dla wszystkich.",
- "Content publish failed.": "Publikacja treści zawiodła.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Ten plik wciąż się synchronizuje, jeśli zapiszesz go teraz, poprzednia treść może zostać utracona.",
- "Write content anyway": "Zapisz treść mimo wszystko",
- "New certificate added:": "Nowy certyfikat dodany:",
- "You current certificate:": "Twój aktualny certyfikat: ",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Zmień na {auth_type}/{auth_user_name}@{domain}-ra",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certyfikat zmieniony na {auth_type}/{auth_user_name}@{domain} -ra.",
- "Site cloned": "Strona sklonowana",
-
- "You have successfully changed the web interface's language!": "Pomyślnie zmieniono język interfejsu stron!",
- "Due to the browser's caching, the full transformation could take some minute.": "Ze względu na buforowanie przeglądarki, pełna zmiana może zająć parę minutę.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Połączenie z UiServer Websocket zostało przerwane. Ponowne łączenie...",
- "Connection with UiServer Websocket recovered.": "Połączenie z UiServer Websocket przywrócone.",
- "UiServer Websocket error, please reload the page.": "Błąd UiServer Websocket, prosze odświeżyć stronę.",
- " Connecting...": " Łączenie...",
- "Site size: ": "Rozmiar strony: ",
- "MB is larger than default allowed ": "MB jest większy niż domyślnie dozwolony ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otwórz stronę i ustaw limit na \" + site_info.next_size_limit + \"MBów",
- " files needs to be downloaded": " pliki muszą zostać ściągnięte",
- " downloaded": " ściągnięte",
- " download failed": " ściąganie nie powiodło się",
- "Peers found: ": "Odnaleziono użytkowników: ",
- "No peers found": "Nie odnaleziono użytkowników",
- "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów",
- "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów",
- " New version of this page has just released. Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana. Odśwież by zobaczyć nową, zmodyfikowaną treść strony.",
- "This site requests permission:": "Ta strona wymaga uprawnień:",
- "_(Accept)": "Przyznaj uprawnienia",
-
- "Sign and publish": "Podpisz i opublikuj",
- "Restart ZeroNet client ?": "Uruchomić ponownie klienta ZeroNet?",
- "Restart": "Uruchom ponownie"
-}
diff --git a/src/Translate/languages/pt-br.json b/src/Translate/languages/pt-br.json
deleted file mode 100644
index a842684f..00000000
--- a/src/Translate/languages/pt-br.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Parabéns, a porta{0} está aberta. Você é um membro completo da rede ZeroNet!",
- "Tor mode active, every connection using Onion route.": "Modo Tor ativado, todas as conexões usam a rota Onion.",
- "Successfully started Tor onion hidden services.": "Os serviços ocultos Tor onion foram inciados com sucesso.",
- "Unable to start hidden services, please check your config.": "Não foi possível iniciar os serviços ocultos, por favor verifique suas configurações.",
- "For faster connections open {0} port on your router.": "Para conexões mais rápidas, abra a porta {0} em seu roteador.",
- "Your connection is restricted. Please, open {0} port on your router": "Sua conexão está restrita. Por favor, abra a porta {0} em seu roteador",
- "or configure Tor to become a full member of the ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.",
-
- "Select account you want to use in this site:": "Selecione a conta que deseja usar nesse site:",
- "currently selected": "atualmente selecionada",
- "Unique to site": "Única para o site",
-
- "Content signing failed": "Assinatura de conteúdo falhou",
- "Content publish queued for {0:.0f} seconds.": "Publicação de conteúdo na fila por {0:.0f} segundos.",
- "Content published to {0} peers.": "Conteúdo publicado para {0} peers.",
- "No peers found, but your content is ready to access.": "Nenhum peer encontrado, mas seu conteúdo está pronto para ser acessado.",
- "Your network connection is restricted. Please, open {0} port": "Sua conexão de rede está restrita. Por favor, abra a porta {0} ",
- "on your router to make your site accessible for everyone.": "em seu roteador para tornar seu site acessível para todos.",
- "Content publish failed.": "Publicação de conteúdo falhou.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Esse arquivo ainda está sincronizado, se escreve-lo agora o conteúdo anterior poderá ser perdido.",
- "Write content anyway": "Escrever o conteúdo mesmo assim",
- "New certificate added:": "Novo certificado adicionado:",
- "You current certificate:": "Seu certificado atual:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Alterar para {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certificado alterado para: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Site clonado",
-
- "You have successfully changed the web interface's language!": "Você alterou o idioma da interface web com sucesso!",
- "Due to the browser's caching, the full transformation could take some minute.": "Devido ao cache do navegador, a transformação completa pode levar alguns minutos.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "A conexão com UiServer Websocket foi perdida. Reconectando...",
- "Connection with UiServer Websocket recovered.": "Conexão com UiServer Websocket recuperada.",
- "UiServer Websocket error, please reload the page.": "Erro de UiServer Websocket, por favor atualize a página.",
- " Connecting...": " Conectando...",
- "Site size: ": "Tamanho do site: ",
- "MB is larger than default allowed ": "MB é maior do que o tamanho permitido por padrão",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abrir site e definir limite de tamanho para \" + site_info.next_size_limit + \"MBs",
- " files needs to be downloaded": " os arquivos precisam ser baixados",
- " downloaded": " baixados",
- " download failed": " falha no download",
- "Peers found: ": "Peers encontrados: ",
- "No peers found": "Nenhum peer encontrado",
- "Running out of size limit (": "Passando do tamanho limite (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Definir limite para \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Limite de tamanho do site alterado para {0}MBs",
- " New version of this page has just released. Reload to see the modified content.": " Uma nova versão desse site acaba de ser publicada. Atualize para ver o conteúdo modificado.",
- "This site requests permission:": "Esse site solicita permissão:",
- "_(Accept)": "Conceder",
-
- "Save": "Salvar",
- "Trackers announcing": "Trackers anunciando",
- "Error": "Erro",
- "Done": "Concluído",
- "Tracker connection error detected.": "Erro de conexão com tracker foi detectado."
-
-}
diff --git a/src/Translate/languages/ru.json b/src/Translate/languages/ru.json
deleted file mode 100644
index 96c84b91..00000000
--- a/src/Translate/languages/ru.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Поздравляем, ваш порт {0} открыт. Вы полноценный участник сети ZeroNet!",
- "Tor mode active, every connection using Onion route.": "Режим Tor включен, все соединения осуществляются через Tor.",
- "Successfully started Tor onion hidden services.": "Скрытый сервис Tor запущено успешно.",
- "Unable to start hidden services, please check your config.": "Ошибка при запуске скрытого сервиса, пожалуйста проверьте настройки",
- "For faster connections open {0} port on your router.": "Для более быстрой работы сети откройте {0} порт на вашем роутере.",
- "Your connection is restricted. Please, open {0} port on your router": "Подключение ограничено. Пожалуйста откройте {0} порт на вашем роутере",
- "or configure Tor to become a full member of the ZeroNet network.": "или настройте Tor что бы стать полноценным участником сети ZeroNet.",
-
- "Select account you want to use in this site:": "Выберите аккаунт для использования на этом сайте:",
- "currently selected": "сейчас выбран",
- "Unique to site": "Уникальный для этого сайта",
-
- "Content signing failed": "Подпись контента не удалась",
- "Content publish queued for {0:.0f} seconds.": "Публикация контента поставлена в очередь {0:.0f} секунд.",
- "Content published to {0} peers.": "Контент опубликован на {0} пирах.",
- "No peers found, but your content is ready to access.": "Пиры не найдены, но ваш контент доступен.",
- "Your network connection is restricted. Please, open {0} port": "Ваше подключение ограничено. Пожалуйста откройте {0} порт. ",
- "on your router to make your site accessible for everyone.": "на вашем роутере, что бы ваш сайт стал доступнг посетителям.",
- "Content publish failed.": "Ошибка при публикации контента.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Этот файл всё еще синхронизируется, если продолжить его изменение, предыдущий контент может быть потерян.",
- "Write content anyway": "Записать контент в любом случае",
- "New certificate added:": "Добавлен новый сертификат:",
- "You current certificate:": "Ваш текущий сертификат: ",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Изменить его на {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Сертификат изменен на: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Сайт склонирован",
-
- "You have successfully changed the web interface's language!": "Язык интерфейса успешно изменен!",
- "Due to the browser's caching, the full transformation could take some minute.": "В зависимости от работы вашего браузера полное преобразование может занять пару минут.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Подключение к UiServer Websocket прервано. Переподключаюсь...",
- "Connection with UiServer Websocket recovered.": "Подключение к UiServer Websocket восстановлено.",
- "UiServer Websocket error, please reload the page.": "Ошибка UiServer Websocket , перезагрузите страницу!",
- " Connecting...": " Подключение...",
- "Site size: ": "Размер сайта: ",
- "MB is larger than default allowed ": "MB больше чем разрешено по умолчанию ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Открыть сайт и установить лимит занимаемого места на \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " файлы должны быть загружены",
- " downloaded": " загружено",
- " download failed": " ошибка загрузки",
- "Peers found: ": "Пиров найдено: ",
- "No peers found": "Пиры не найдены",
- "Running out of size limit (": "Доступное место закончилось (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Установить лимит на \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Лимит памяти на диске изменен на {0}MB",
- " New version of this page has just released. Reload to see the modified content.": "Доступна новая версия данной страницы Обновите страницу, что бы увидеть изменения!",
- "This site requests permission:": "Данный сайт запрашивает разрешения:",
- "_(Accept)": "Предоставить"
-
-}
diff --git a/src/Translate/languages/sk.json b/src/Translate/languages/sk.json
deleted file mode 100644
index 8fb4554b..00000000
--- a/src/Translate/languages/sk.json
+++ /dev/null
@@ -1,57 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Blahoželáme, váš port {0} je otvorený. Ste úplným členom siete ZeroNet!",
- "Tor mode active, every connection using Onion route.": "Tor mód aktívny, všetky spojenia teraz používajú Onion sieť.",
- "Successfully started Tor onion hidden services.": "Tor úspešne spustený.",
- "Unable to start hidden services, please check your config.": "Nebolo možné spustiť Tor, prosím skontrolujte nastavenia.",
- "For faster connections open {0} port on your router.": "Pre rýchlejšie spojenie otvorte na vašom routery port {0} ",
- "Your connection is restricted. Please, open {0} port on your router": "Vaše pripojenie je obmedzené. Prosím otvorte port {0} na vašom routery.",
- "or configure Tor to become a full member of the ZeroNet network.": "alebo nastavte Tor aby ste sa tali plným členom siete ZeroNet.",
-
- "Select account you want to use in this site:": "Zvoľte účet ktorý chcete používať na tejto stránke:",
- "currently selected": "aktuálne zvolené",
- "Unique to site": "Unikátny pre stránku",
-
- "Content signing failed": "Podpísanie obsahu zlyhalo",
- "Content publish queued for {0:.0f} seconds.": "Podpísanie obsahu bude na rade za {0:.0f} sekúnd",
- "Content published to {0} peers.": "Obsah publikovaný {0} peer-erom",
- "No peers found, but your content is ready to access.": "Neboli nájdený žiadny peer-ery, ale váš obsah je pripravený pre prístup.",
- "Your network connection is restricted. Please, open {0} port": "Vaše pripojenie k sieti je obmedzené. Prosím otvorte port {0} na vašom routery.",
- "on your router to make your site accessible for everyone.": "na vašom routery aby bola vaša stránka prístupná pre všetkých.",
- "Content publish failed.": "Publikovanie obsahu zlyhalo.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Tento súbor sa stále synchronizuje, ak v ňom spravíte zmeny, predchádzajúci obsah sa môže stratiť.",
- "Write content anyway": "Aj tak spraviť zmeny",
- "New certificate added:": "Pridaný nový certifikát:",
- "You current certificate:": "Váš aktuálny certifikát:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Zvoľte to na {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Certifikát zmenený na: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Stránka naklonovaná",
-
- "You have successfully changed the web interface's language!": "Úspešne ste zmenili jazyk webového rozhrania!",
- "Due to the browser's caching, the full transformation could take some minute.": "Kôli cachu webového prehliadavača, ceľková transformácia môže chvíĺu trvať.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Spojenie s UiServer Websocket bolo stratené. Znovu pripájame...",
- "Connection with UiServer Websocket recovered.": "Spojenie s UiServer Websocket obnovené.",
- "UiServer Websocket error, please reload the page.": "Chyba UiServer Websocket-u, prosím znovu načítajte stránku.",
- " Connecting...": " Pripájanie...",
- "Site size: ": "Veľkosť stránky: ",
- "MB is larger than default allowed ": "MB je viac ako povolená hodnota",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otvoriť stránku a nastaviť limit veľkosti na \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " súbory je potrebné stiahnuť",
- " downloaded": " stiahnuté",
- " download failed": " sťahovanie zlyhalo",
- "Peers found: ": "Peer-erov nájdených: ",
- "No peers found": "Neboli nájdený žiadny peer-ery",
- "Running out of size limit (": "Presahuje povolený limit veľkosti pamäte (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Nastaviť limit na \" + site_info.next_size_limit + \"MB ändern",
- "Site size limit changed to {0}MB": "Limit veľkosti pamäte nastavený na {0}MB",
- " New version of this page has just released. Reload to see the modified content.": " Bola vydaná nová verzia tejto stránky. Znovu načítajte túto stránku aby bolo vidieť zmeny.",
- "This site requests permission:": "Táto stránka vyžaduje povolenie:",
- "_(Accept)": "Udeliť",
-
- "on": "",
- "Oct": "Okt",
- "May": "Máj",
- "Jun": "Jún",
- "Jul": "Júl"
-
-}
diff --git a/src/Translate/languages/sl.json b/src/Translate/languages/sl.json
deleted file mode 100644
index 2aeb628e..00000000
--- a/src/Translate/languages/sl.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Čestitke, vaša vrata {0} so odprta. Postali ste polnopravni član ZeroNet omrežja!",
- "Tor mode active, every connection using Onion route.": "Način Tor aktiven.",
- "Successfully started Tor onion hidden services.": "Storitve Tor uspešno zagnane.",
- "Unable to start hidden services, please check your config.": "Ni bilo mogoče zagnati Tor storitev. Preverite nastavitve.",
- "For faster connections open {0} port on your router.": "Za hitrejše povezave na svojem usmerjevalniku odprite vrata {0} .",
- "Your connection is restricted. Please, open {0} port on your router": "Vaša povezava je omejena. Na svojem usmerjevalniku odprite vrata {0} ",
- "or configure Tor to become a full member of the ZeroNet network.": "ali nastavite Tor, da postanete polnopravni član ZeroNet omrežja.",
-
- "Select account you want to use in this site:": "Izberite račun, ki ga želite uporabiti na tem spletnem mestu:",
- "currently selected": "trenutno izbrano",
- "Unique to site": "Edinstven za spletno mesto",
-
- "Content signing failed": "Podpisovanje vsebine ni uspelo",
- "Content publish queued for {0:.0f} seconds.": "Objava vsebine na čakanju za {0:.0f} sekund.",
- "Content published to {0} peers.": "Vsebina objavljena na {0} povezavah.",
- "No peers found, but your content is ready to access.": "Ni nobenih povezav, vendar je vaša vsebina pripravljena za dostop.",
- "Your network connection is restricted. Please, open {0} port": "Vaša povezava je omejena. Prosimo, odprite vrata {0} ",
- "on your router to make your site accessible for everyone.": "na vašem usmerjevalniku, da bo vaše spletno mesto dostopno za vse.",
- "Content publish failed.": "Objavljanje vsebine ni uspelo.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Ta datoteka se še vedno sinhronizira. Če jo uredite zdaj, se lahko zgodi, da bo prejšnja vsebina izgubljena.",
- "Write content anyway": "Vseeno uredi vsebino",
- "New certificate added:": "Dodano novo potrdilo:",
- "You current certificate:": "Trenutno potrdilo:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "Spremenite ga na {auth_type}/{auth_user_name}@{domain}",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "Potrdilo spremenjeno na: {auth_type}/{auth_user_name}@{domain} .",
- "Site cloned": "Stran klonirana",
-
- "You have successfully changed the web interface's language!": "Uspešno ste spremenili jezik spletnega vmesnika!",
- "Due to the browser's caching, the full transformation could take some minute.": "Zaradi predpomnjenja brskalnika lahko popolna preobrazba traja nekaj minut.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "Povezava z UiServer Websocket je bila izgubljena. Ponovno povezovanje ...",
- "Connection with UiServer Websocket recovered.": "Povezava z UiServer Websocket je vzpostavljena.",
- "UiServer Websocket error, please reload the page.": "Napaka UiServer Websocket. Prosimo osvežite stran.",
- " Connecting...": " Povezovanje ...",
- "Site size: ": "Velikost strani: ",
- "MB is larger than default allowed ": "MB je večja od dovoljenih",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Odpri to stran in nastavi omejitev na \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " datotek mora biti prenešenih",
- " downloaded": " preneseno",
- " download failed": " prenos ni uspel",
- "Peers found: ": "Najdene povezave: ",
- "No peers found": "Ni najdenih povezav",
- "Running out of size limit (": "Zmanjkuje dovoljenega prostora (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Nastavi omejitev na \" + site_info.next_size_limit + \"MB",
- "Site size limit changed to {0}MB": "Omejitev strani nastavljena na{0} MB",
- " New version of this page has just released. Reload to see the modified content.": " Ravnokar je bila objavljena nova različica te strani. Osvežite jo, da boste videli novo vsebino.",
- "This site requests permission:": "Ta stran zahteva dovoljenja:",
- "_(Accept)": "Dovoli"
-
-}
diff --git a/src/Translate/languages/tr.json b/src/Translate/languages/tr.json
deleted file mode 100644
index 09a1bdb5..00000000
--- a/src/Translate/languages/tr.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "Tebrikler, portunuz ({0} ) açık. Artık ZeroNet ağına katıldınız!",
- "Tor mode active, every connection using Onion route.": "Tor aktif, tüm bağlantılar Onion yönlendircisini kullanıyor.",
- "Successfully started Tor onion hidden services.": "Gizli Tor hizmetleri başlatıldı.",
- "Unable to start hidden services, please check your config.": "Gizli hizmetler başlatılamadı, lütfen ayarlarınızı kontrol ediniz.",
- "For faster connections open {0} port on your router.": "Daha hızlı bağlantı için {0} nolu portu bilgisayarınıza yönlendirin.",
- "Your connection is restricted. Please, open {0} port on your router": "Sınırlı bağlantı. Lütfen, {0} nolu portu bilgisayarınıza yönlendirin",
- "or configure Tor to become a full member of the ZeroNet network.": "ya da ZeroNet ağına tam olarak katılabilmek için Tor'u kullanın.",
-
- "Select account you want to use in this site:": "Bu sitede kullanmak için bir hesap seçiniz:",
- "currently selected": "kullanılan",
- "Unique to site": "Bu site için benzersiz",
-
- "Content signing failed": "İçerik imzalama başarısız oldu",
- "Content publish queued for {0:.0f} seconds.": "İçerik yayımlanmak üzere {0:.0f} saniyedir kuyrukta.",
- "Content published to {0} peers.": "İçerik {0} eşe dağıtıldı.",
- "No peers found, but your content is ready to access.": "Eş bulunamadı, ama içeriğiniz erişime hazır.",
- "Your network connection is restricted. Please, open {0} port": "Sınırlı bağlantı. Lütfen, {0} nolu portu bilgisayarınıza yönlendirin",
- "on your router to make your site accessible for everyone.": "böylece sitenizi herkes için erişilebilir yapabilirsiniz",
- "Content publish failed.": "İçerik yayımlama başarısız oldu.",
- "This file still in sync, if you write it now, then the previous content may be lost.": "Bu dosya hala güncelleniyor, eğer şimdi kaydederseniz, önceki içerik kaybolabilir.",
- "Write content anyway": "Yine de kaydet",
- "New certificate added:": "Yeni sertifika eklendi:",
- "You current certificate:": "Kullanılan sertifikanız:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} olarak değiştir.",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "{auth_type}/{auth_user_name}@{domain} olarak değiştirildi",
- "Site cloned": "Site klonlandı",
-
- "You have successfully changed the web interface's language!": "WEB ara yüzü için dil başarıyla değiştirildi!",
- "Due to the browser's caching, the full transformation could take some minute.": "Tam dönüşümün sağlanması, tarayıcı önbelleklemesi yüzünden zaman alabilir.",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket ile bağlantı kesildi. Yeniden bağlanılıyor...",
- "Connection with UiServer Websocket recovered.": "UiServer Websocket ile bağlantı yeniden kuruldu.",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket hatası, lütfen sayfayı yenileyin.",
- " Connecting...": " Bağlanıyor...",
- "Site size: ": "Site boyutu: ",
- "MB is larger than default allowed ": "MB izin verilenden fazla ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Siteyi açın ve boyut sınırını \" + site_info.next_size_limit + \"MB'ye yükseltin",
- " files needs to be downloaded": " indirilmesi gereken dosyalar",
- " downloaded": " indirildi",
- " download failed": " indirme başarısız",
- "Peers found: ": "Bulunan eşler: ",
- "No peers found": "Eş bulunamadı",
- "Running out of size limit (": "Boyut sınırlamasını aştı (",
- "Set limit to \" + site_info.next_size_limit + \"MB": "Sınırlamayı \" + site_info.next_size_limit + \"MB'ye yükselt",
- "Site size limit changed to {0}MB": "Site boyut sınırlaması {0}MB olarak ayarlandı",
- " New version of this page has just released. Reload to see the modified content.": " Bu sayfanın yeni versiyonu yayımlandı. Değişen içeriği görmek için yeniden yükleyiniz.",
- "This site requests permission:": "Bu site bir izin istiyor:",
- "_(Accept)": "İzin ver"
-
-}
diff --git a/src/Translate/languages/zh-tw.json b/src/Translate/languages/zh-tw.json
deleted file mode 100644
index 0ec071b4..00000000
--- a/src/Translate/languages/zh-tw.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "祝賀,你的埠 ({0} ) 已經打開。 你已經是 ZeroNet 網路的正式成員了!",
- "Tor mode active, every connection using Onion route.": "Tor 模式啟用,每個連接正在使用洋蔥路由。",
- "Successfully started Tor onion hidden services.": "成功啟動 Tor 洋蔥隱藏服務。",
- "Unable to start hidden services, please check your config.": "無法打開隱藏服務,請檢查你的配置。",
- "For faster connections open {0} port on your router.": "為了更快的連接請在路由器上打開 {0} 埠。",
- "Your connection is restricted. Please, open {0} port on your router": "你的連接受限制。請在你的路由器上打開 {0} 埠",
- "or configure Tor to become a full member of the ZeroNet network.": "或者配置你的 Tor 來成為 ZeroNet 的正式成員。",
-
- "Select account you want to use in this site:": "選擇你要在這個網站使用的帳戶:",
- "currently selected": "當前選擇",
- "Unique to site": "網站獨有身份",
-
- "Content signing failed": "內容簽署失敗",
- "Content publish queued for {0:.0f} seconds.": "內容已加入 {0:.0f} 秒後的發佈隊列。",
- "Content published to {0}/{1} peers.": "內容已發佈到 {0}/{1} 個節點。",
- "Content published to {0} peers.": "內容已發佈到 {0} 個節點。",
- "No peers found, but your content is ready to access.": "找不到節點,但是你的內容已經準備好被訪問。",
- "Your network connection is restricted. Please, open {0} port": "你的網路連接受限制。請在你的路由器上打開 {0} 埠",
- "on your router to make your site accessible for everyone.": "確保你的網站能被每一個人訪問。",
- "Content publish failed.": "內容發佈失敗。",
- "This file still in sync, if you write it now, then the previous content may be lost.": "這個檔仍然在同步中,如果你現在寫入它,之前的內容可能會被丟失。",
- "Write content anyway": "強制寫入內容",
- "New certificate added:": "新證書:",
- "You current certificate:": "你當前的證書:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "改變至 {auth_type}/{auth_user_name}@{domain}-ra",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "證書更改至:{auth_type}/{auth_user_name}@{domain} 。",
- "Site cloned": "網站已克隆",
-
- "You have successfully changed the web interface's language!": "你已經成功改變了 Web 界面的語言!",
- "Due to the browser's caching, the full transformation could take some minute.": "由於你的瀏覽器緩存,完整的翻譯可能需要花幾分鐘。",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket 的連線已丟失。重新連線中...",
- "Connection with UiServer Websocket recovered.": "UiServer Websocket 的連線已恢復。",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket 錯誤,請重新載入頁面。",
- " Connecting...": " 連線中...",
- "Site size: ": "網站大小:",
- "MB is larger than default allowed ": "MB 比預設允許的值更大 ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打開網站並設定大小限制到 \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " 個檔需要下載",
- " downloaded": " 已下載",
- " download failed": " 下載失敗",
- "Peers found: ": "已找到節點:",
- "No peers found": "找不到節點",
- "Running out of size limit (": "超出大小限制",
- "Set limit to \" + site_info.next_size_limit + \"MB": "設定限制到 \" + site_info.next_size_limit + \"MB",
- "Cloning site...": "複製網站中...",
- "Site cloned": "網站已複製",
- "Site size limit changed to {0}MB": "網站大小限制已改變到 {0}MB",
- " New version of this page has just released. Reload to see the modified content.": " 本頁面的新版本已經發佈。 重新載入來查看更改後的內容。",
- "This site requests permission:": "這個網站的請求許可權:",
- "_(Accept)": "授權"
-
-}
diff --git a/src/Translate/languages/zh.json b/src/Translate/languages/zh.json
deleted file mode 100644
index 16a40b1a..00000000
--- a/src/Translate/languages/zh.json
+++ /dev/null
@@ -1,55 +0,0 @@
-{
- "Congratulations, your port {0} is opened. You are a full member of the ZeroNet network!": "祝贺,您的端口 ({0} ) 已经打开。 您已经是 ZeroNet 网络的正式成员了!",
- "Tor mode active, every connection using Onion route.": "Tor 模式启用,每个连接正在使用洋葱路由。",
- "Successfully started Tor onion hidden services.": "成功启动 Tor 洋葱隐藏服务。",
- "Unable to start hidden services, please check your config.": "无法打开隐藏服务,请检查您的配置。",
- "For faster connections open {0} port on your router.": "为了更快的连接请在路由器上打开 {0} 端口。",
- "Your connection is restricted. Please, open {0} port on your router": "您的连接受限制。请在您的路由器上打开 {0} 端口",
- "or configure Tor to become a full member of the ZeroNet network.": "或者配置您的 Tor 来成为 ZeroNet 的正式成员。",
-
- "Select account you want to use in this site:": "选择您要在这个网站使用的帐户:",
- "No certificate": "没有证书",
- "currently selected": "当前选择",
- "Unique to site": "网站独有身份",
-
- "Content signing failed": "内容签名失败",
- "Content publish queued for {0:.0f} seconds.": "内容已加入 {0:.0f} 秒后的发布队列。",
- "Content published to {0}/{1} peers.": "内容已发布到 {0}/{1} 个节点。",
- "Content published to {0} peers.": "内容已发布到 {0} 个节点。",
- "No peers found, but your content is ready to access.": "找不到节点,但是您的内容已经准备好被访问。",
- "Your network connection is restricted. Please, open {0} port": "您的网络连接受限制。请在您的路由器上打开 {0} 端口",
- "on your router to make your site accessible for everyone.": "确保您的站点能被每一个人访问。",
- "Content publish failed.": "内容发布失败。",
- "This file still in sync, if you write it now, then the previous content may be lost.": "这个文件仍然在同步中,如果您现在写入它,之前的内容可能会被丢失。",
- "Write content anyway": "强制写入内容",
- "New certificate added:": "新证书:",
- "You current certificate:": "您当前的证书:",
- "Change it to {auth_type}/{auth_user_name}@{domain}": "更改至 {auth_type}/{auth_user_name}@{domain}-ra",
- "Certificate changed to: {auth_type}/{auth_user_name}@{domain} .": "证书更改至:{auth_type}/{auth_user_name}@{domain} 。",
- "Site cloned": "站点已克隆",
-
- "You have successfully changed the web interface's language!": "您已经成功更改了 web 界面的语言!",
- "Due to the browser's caching, the full transformation could take some minute.": "由于您的浏览器缓存,完整的翻译可能需要花几分钟。",
-
- "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket 的连接已丢失。重新连接中...",
- "Connection with UiServer Websocket recovered.": "UiServer Websocket 的连接已恢复。",
- "UiServer Websocket error, please reload the page.": "UiServer Websocket 错误,请重新加载页面。",
- " Connecting...": " 连接中...",
- "Site size: ": "站点大小:",
- "MB is larger than default allowed ": "MB 比默认允许的值更大 ",
- "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打开站点并设置大小限制到 \" + site_info.next_size_limit + \"MB",
- " files needs to be downloaded": " 个文件需要下载",
- " downloaded": " 已下载",
- " download failed": " 下载失败",
- "Peers found: ": "已找到节点:",
- "No peers found": "找不到节点",
- "Running out of size limit (": "超出大小限制",
- "Set limit to \" + site_info.next_size_limit + \"MB": "设置限制到 \" + site_info.next_size_limit + \"MB",
- "Cloning site...": "克隆站点中...",
- "Site cloned": "站点已克隆",
- "Site size limit changed to {0}MB": "站点大小限制已更改到 {0}MB",
- " New version of this page has just released. Reload to see the modified content.": " 本页面的新版本已经发布。 重新加载来查看更改后的内容。",
- "This site requests permission:": "这个站点的请求权限:",
- "_(Accept)": "授权"
-
-}
diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py
deleted file mode 100644
index 8f00efcb..00000000
--- a/src/Ui/UiRequest.py
+++ /dev/null
@@ -1,949 +0,0 @@
-import time
-import re
-import os
-import mimetypes
-import json
-import html
-import urllib
-import socket
-
-import gevent
-
-from Config import config
-from Site import SiteManager
-from User import UserManager
-from Plugin import PluginManager
-from Ui.UiWebsocket import UiWebsocket
-from Crypt import CryptHash
-from util import helper
-
-status_texts = {
- 200: "200 OK",
- 206: "206 Partial Content",
- 400: "400 Bad Request",
- 403: "403 Forbidden",
- 404: "404 Not Found",
- 500: "500 Internal Server Error",
-}
-
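-# Explicit content type overrides; getContentType() falls back to
-# mimetypes.guess_type() for any extension not listed here.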
-content_types = {
- "asc": "application/pgp-keys",
- "css": "text/css",
- "gpg": "application/pgp-encrypted",
- "html": "text/html",
- "js": "application/javascript",
- "json": "application/json",
- "oga": "audio/ogg",
- "ogg": "application/ogg",
- "ogv": "video/ogg",
- "sig": "application/pgp-signature",
- "txt": "text/plain",
- "webmanifest": "application/manifest+json",
- "wasm": "application/wasm",
- "webp": "image/webp"
-}
-
-
-class SecurityError(Exception):
- pass
-
-
-@PluginManager.acceptPlugins
-class UiRequest(object):
-
- def __init__(self, server, get, env, start_response):
- if server:
- self.server = server
- self.log = server.log
- self.get = get # Get parameters
- self.env = env # Environment settings
- # ['CONTENT_LENGTH', 'CONTENT_TYPE', 'GATEWAY_INTERFACE', 'HTTP_ACCEPT', 'HTTP_ACCEPT_ENCODING', 'HTTP_ACCEPT_LANGUAGE',
- # 'HTTP_COOKIE', 'HTTP_CACHE_CONTROL', 'HTTP_HOST', 'HTTP_HTTPS', 'HTTP_ORIGIN', 'HTTP_PROXY_CONNECTION', 'HTTP_REFERER',
- # 'HTTP_USER_AGENT', 'PATH_INFO', 'QUERY_STRING', 'REMOTE_ADDR', 'REMOTE_PORT', 'REQUEST_METHOD', 'SCRIPT_NAME',
- # 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'SERVER_SOFTWARE', 'werkzeug.request', 'wsgi.errors',
- # 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version']
-
- self.start_response = start_response # Start response function
- self.user = None
- self.script_nonce = None # Nonce for script tags in wrapper html
-
- def learnHost(self, host):
- self.server.allowed_hosts.add(host)
- self.server.log.info("Added %s as allowed host" % host)
-
- def isHostAllowed(self, host):
- if host in self.server.allowed_hosts:
- return True
-
- # Allow any IP address as they are not affected by DNS rebinding
- # attacks
- if helper.isIp(host):
- self.learnHost(host)
- return True
-
- if ":" in host and helper.isIp(host.rsplit(":", 1)[0]): # Test without port
- self.learnHost(host)
- return True
-
- if self.isProxyRequest(): # Support for chrome extension proxy
- if self.isDomain(host):
- return True
- else:
- return False
-
- return False
-
- def isDomain(self, address):
- return self.server.site_manager.isDomainCached(address)
-
- def resolveDomain(self, domain):
- return self.server.site_manager.resolveDomainCached(domain)
-
- # Call the request handler function based on path
- def route(self, path):
- # Restrict Ui access by ip
- if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict:
- return self.error403(details=False)
-
- # Check if host allowed to do request
- if not self.isHostAllowed(self.env.get("HTTP_HOST")):
- ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False))
-
- http_get = self.env["PATH_INFO"]
- if self.env["QUERY_STRING"]:
- http_get += "?{0}".format(self.env["QUERY_STRING"])
- self_host = self.env["HTTP_HOST"].split(":")[0]
- self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host))
- link = "http://{0}{1}".format(self_ip, http_get)
- ret_body = """
- Start the client with --ui_host "{host}"
argument
- or access via ip: {link}
- """.format(
- host=html.escape(self.env["HTTP_HOST"]),
- link=html.escape(link)
- ).encode("utf8")
- return iter([ret_error, ret_body])
-
- # Prepend .bit host for transparent proxy
- if self.isDomain(self.env.get("HTTP_HOST")):
- path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path)
- path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension
- path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access
-
- # Sanitize request url
- path = path.replace("\\", "/")
- if "../" in path or "./" in path:
- return self.error403("Invalid path: %s" % path)
-
- if self.env["REQUEST_METHOD"] == "OPTIONS":
- if "/" not in path.strip("/"):
- content_type = self.getContentType("index.html")
- else:
- content_type = self.getContentType(path)
-
- extra_headers = {"Access-Control-Allow-Origin": "null"}
-
- self.sendHeader(content_type=content_type, extra_headers=extra_headers, noscript=True)
- return ""
-
- if path == "/":
- return self.actionIndex()
- elif path in ("/favicon.ico", "/apple-touch-icon.png"):
- return self.actionFile("src/Ui/media/img/%s" % path)
- # Internal functions
- elif "/ZeroNet-Internal/" in path:
- path = re.sub(".*?/ZeroNet-Internal/", "/", path)
- func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function
- if func:
- return func()
- else:
- return self.error404(path)
- # Media
- elif path.startswith("/uimedia/"):
- return self.actionUiMedia(path)
- elif "/uimedia/" in path:
- # uimedia within site dir (for chrome extension)
- path = re.sub(".*?/uimedia/", "/uimedia/", path)
- return self.actionUiMedia(path)
- # Websocket
- elif path == "/Websocket":
- return self.actionWebsocket()
- # Debug
- elif path == "/Debug" and config.debug:
- return self.actionDebug()
- elif path == "/Console" and config.debug:
- return self.actionConsole()
- # Wrapper-less static files
- elif path.startswith("/raw/"):
- return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True)
-
- elif path.startswith("/add/"):
- return self.actionSiteAdd()
- # Site media wrapper
- else:
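- # A wrapper_nonce (minted in getWrapperNonce) is valid for a single
- # request: consume it here so a leaked media URL cannot be replayed
- # to fetch raw html without the wrapper frame.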
- if self.get.get("wrapper_nonce"):
- if self.get["wrapper_nonce"] in self.server.wrapper_nonces:
- self.server.wrapper_nonces.remove(self.get["wrapper_nonce"])
- return self.actionSiteMedia("/media" + path) # Only serve html files with frame
- else:
- self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"])
- body = self.actionWrapper(path)
- else:
- body = self.actionWrapper(path)
- if body:
- return body
- else:
- func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function
- if func:
- return func()
- else:
- ret = self.error404(path)
- return ret
-
- # The request is proxied by chrome extension or a transparent proxy
- def isProxyRequest(self):
- return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST")))
-
- def isWebSocketRequest(self):
- return self.env.get("HTTP_UPGRADE") == "websocket"
-
- def isAjaxRequest(self):
- return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest"
-
- # Get mime by filename
- def getContentType(self, file_name):
- file_name = file_name.lower()
- ext = file_name.rsplit(".", 1)[-1]
-
- if ext in content_types:
- content_type = content_types[ext]
- elif ext in ("ttf", "woff", "otf", "woff2", "eot", "sfnt", "collection"):
- content_type = "font/%s" % ext
- else:
- content_type = mimetypes.guess_type(file_name)[0]
-
- if not content_type:
- content_type = "application/octet-stream"
-
- return content_type.lower()
-
- # Return: Posted variables
- def getPosted(self):
- if self.env['REQUEST_METHOD'] == "POST":
- return dict(urllib.parse.parse_qsl(
- self.env['wsgi.input'].readline().decode()
- ))
- else:
- return {}
-
- # Return: Cookies based on self.env
- def getCookies(self):
- raw_cookies = self.env.get('HTTP_COOKIE')
- if raw_cookies:
- cookies = urllib.parse.parse_qsl(raw_cookies)
- return {key.strip(): val for key, val in cookies}
- else:
- return {}
-
- def getCurrentUser(self):
- if self.user:
- return self.user # Cache
- self.user = UserManager.user_manager.get() # Get user
- if not self.user:
- self.user = UserManager.user_manager.create()
- return self.user
-
- def getRequestUrl(self):
- if self.isProxyRequest():
- if self.env["PATH_INFO"].startswith("http://zero/"):
- return self.env["PATH_INFO"]
- else: # Add http://zero to direct domain access
- return self.env["PATH_INFO"].replace("http://", "http://zero/", 1)
- else:
- return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"]
-
- def getReferer(self):
- referer = self.env.get("HTTP_REFERER")
- if referer and self.isProxyRequest() and not referer.startswith("http://zero/"):
- return referer.replace("http://", "http://zero/", 1)
- else:
- return referer
-
- def isScriptNonceSupported(self):
- user_agent = self.env.get("HTTP_USER_AGENT", "") # Default to "" so a missing header can't break the checks below
- if "Edge/" in user_agent:
- is_script_nonce_supported = False
- elif "Safari/" in user_agent and "Chrome/" not in user_agent:
- is_script_nonce_supported = False
- else:
- is_script_nonce_supported = True
- return is_script_nonce_supported
-
- # Send response headers
- def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]):
- headers = {}
- headers["Version"] = "HTTP/1.1"
- headers["Connection"] = "Keep-Alive"
- headers["Keep-Alive"] = "max=25, timeout=30"
- headers["X-Frame-Options"] = "SAMEORIGIN"
- if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()):
- headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css
-
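- # Content-Security-Policy: "noscript" responses get a fully locked-down
- # policy, otherwise inline scripts are only allowed when tagged with the
- # per-response nonce (on browsers that support script nonces).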
- if noscript:
- headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';"
- elif script_nonce and self.isScriptNonceSupported():
- headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; connect-src *; frame-src 'self' blob:".format(script_nonce)
-
- if allow_ajax:
- headers["Access-Control-Allow-Origin"] = "null"
-
- if self.env["REQUEST_METHOD"] == "OPTIONS":
- # Allow json access
- headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range"
- headers["Access-Control-Allow-Credentials"] = "true"
-
- # Download instead of display file types that can be dangerous
- if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type):
- headers["Content-Disposition"] = "attachment"
-
- cacheable_type = (
- self.env["REQUEST_METHOD"] == "OPTIONS" or
- content_type.split("/", 1)[0] in ("image", "video", "font") or
- content_type in ("application/javascript", "text/css")
- )
-
- if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"):
- content_type += "; charset=utf-8"
-
- if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min
- headers["Cache-Control"] = "public, max-age=600" # Cache 10 min
- else:
- headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all
- headers["Content-Type"] = content_type
- headers.update(extra_headers)
- return self.start_response(status_texts[status], list(headers.items()))
-
- # Renders a template
- def render(self, template_path, *args, **kwargs):
- template = open(template_path, encoding="utf8").read()
-
- def renderReplacer(m):
- if m.group(1) in kwargs:
- return "%s" % kwargs.get(m.group(1), "")
- else:
- return m.group(0)
-
- template_rendered = re.sub("{(.*?)}", renderReplacer, template)
-
- return template_rendered.encode("utf8")
-
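- # Decide whether the requested path needs the iframe wrapper: only
- # html-like content is wrapped, everything else is served directly.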
- def isWrapperNecessary(self, path):
- match = re.match(r"/(?P[A-Za-z0-9\._-]+)(?P/.*|$)", path)
-
- if not match:
- return True
-
- inner_path = match.group("inner_path").lstrip("/")
- if not inner_path or path.endswith("/"): # It's a directory
- content_type = self.getContentType("index.html")
- else: # It's a file
- content_type = self.getContentType(inner_path)
-
- is_html_file = "html" in content_type or "xhtml" in content_type
-
- return is_html_file
-
- @helper.encodeResponse
- def formatRedirect(self, url):
- return """
-
-
- Redirecting to {0}
-
-
-
- """.format(html.escape(url))
-
- # - Actions -
-
- # Redirect to an url
- def actionRedirect(self, url):
- self.start_response('301 Redirect', [('Location', str(url))])
- yield self.formatRedirect(url)
-
- def actionIndex(self):
- return self.actionRedirect("/" + config.homepage + "/")
-
- # Render a file from media with iframe site wrapper
- def actionWrapper(self, path, extra_headers=None):
- if not extra_headers:
- extra_headers = {}
- script_nonce = self.getScriptNonce()
-
- match = re.match(r"/(?P[A-Za-z0-9\._-]+)(?P/.*|$)", path)
- just_added = False
- if match:
- address = match.group("address")
- inner_path = match.group("inner_path").lstrip("/")
-
- if not self.isWrapperNecessary(path):
- return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper
-
- if self.isAjaxRequest():
- return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
-
- if self.isWebSocketRequest():
- return self.error403("WebSocket request not allowed to load wrapper") # No websocket
-
- if "text/html" not in self.env.get("HTTP_ACCEPT", ""):
- return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", ""))
- if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""):
- return self.error403("Prefetch not allowed to load wrapper")
-
- site = SiteManager.site_manager.get(address)
-
- if site and site.content_manager.contents.get("content.json"):
- title = site.content_manager.contents["content.json"]["title"]
- else:
- title = "Loading %s..." % address
- site = SiteManager.site_manager.get(address)
- if site: # Already added, but not downloaded
- if time.time() - site.announcer.time_last_announce > 5:
- site.log.debug("Reannouncing site...")
- gevent.spawn(site.update, announce=True)
- else: # If not added yet
- site = SiteManager.site_manager.need(address)
- just_added = True
-
- if not site:
- return False
-
- self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce)
-
- min_last_announce = (time.time() - site.announcer.time_last_announce) / 60
- if min_last_announce > 60 and site.isServing() and not just_added:
- site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce)
- gevent.spawn(site.update, announce=True)
-
- return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)])
- # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092)
-
- else: # Bad url
- return False
-
- def getSiteUrl(self, address):
- if self.isProxyRequest():
- return "http://zero/" + address
- else:
- return "/" + address
-
- def getWsServerUrl(self):
- if self.isProxyRequest():
- if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1
- server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"]
- else: # Remote client, use SERVER_NAME as server's real address
- server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"])
- else:
- server_url = ""
- return server_url
-
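- # Extract the zeronet_peers=ip:port,... parameter from the query string,
- # add each listed peer to the site, then pass the rest of the query through.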
- def processQueryString(self, site, query_string):
- match = re.search("zeronet_peers=(.*?)(&|$)", query_string)
- if match:
- query_string = query_string.replace(match.group(0), "")
- num_added = 0
- for peer in match.group(1).split(","):
- if not re.match(".*?:[0-9]+$", peer):
- continue
- ip, port = peer.rsplit(":", 1)
- if site.addPeer(ip, int(port), source="query_string"):
- num_added += 1
- site.log.debug("%s peers added by query string" % num_added)
-
- return query_string
-
- def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None):
- file_inner_path = inner_path
- if not file_inner_path:
- file_inner_path = "index.html" # If inner path defaults to index.html
-
- if file_inner_path.endswith("/"):
- file_inner_path = file_inner_path + "index.html"
-
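- # Work out the site root and file URLs: transparent proxy requests may
- # address the site as http://zero/<address>/ or by bare domain; direct
- # requests are always rooted at /<address>/.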
- address = re.sub("/.*", "", path.lstrip("/"))
- if self.isProxyRequest() and (not path or "/" in path[1:]):
- if self.env["HTTP_HOST"] == "zero":
- root_url = "/" + address + "/"
- file_url = "/" + address + "/" + inner_path
- else:
- file_url = "/" + inner_path
- root_url = "/"
-
- else:
- file_url = "/" + address + "/" + inner_path
- root_url = "/" + address + "/"
-
- if self.isProxyRequest():
- self.server.allowed_ws_origins.add(self.env["HTTP_HOST"])
-
- # Wrapper variable inits
- body_style = ""
- meta_tags = ""
- postmessage_nonce_security = "false"
-
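- # Append the one-time wrapper_nonce to the iframe's query string so the
- # follow-up media request (checked in route()) may return raw html.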
- wrapper_nonce = self.getWrapperNonce()
- inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", ""))
-
- if "?" in inner_path:
- sep = "&"
- else:
- sep = "?"
-
- if inner_query_string:
- inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce)
- else:
- inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce)
-
- if self.isProxyRequest(): # It's a remote proxy request
- homepage = "http://zero/" + config.homepage
- else: # Use relative path
- homepage = "/" + config.homepage
-
- server_url = self.getWsServerUrl() # Real server url for WS connections
-
- user = self.getCurrentUser()
- if user:
- theme = user.settings.get("theme", "light")
- else:
- theme = "light"
-
- themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme)
-
- if site.content_manager.contents.get("content.json"): # Got content.json
- content = site.content_manager.contents["content.json"]
- if content.get("background-color"):
- background_color = content.get("background-color-%s" % theme, content["background-color"])
- body_style += "background-color: %s;" % html.escape(background_color)
- if content.get("viewport"):
- meta_tags += '<meta name="viewport" id="viewport" content="%s">' % html.escape(content["viewport"])
- if content.get("favicon"):
- meta_tags += '<link rel="icon" href="%s%s">' % (root_url, html.escape(content["favicon"]))
- if content.get("postmessage_nonce_security"):
- postmessage_nonce_security = "true"
-
- sandbox_permissions = ""
-
- if "NOSANDBOX" in site.settings["permissions"]:
- sandbox_permissions += " allow-same-origin"
-
- if show_loadingscreen is None:
- show_loadingscreen = not site.storage.isFile(file_inner_path)
-
- return self.render(
- "src/Ui/template/wrapper.html",
- server_url=server_url,
- inner_path=inner_path,
- file_url=re.escape(file_url),
- file_inner_path=re.escape(file_inner_path),
- address=site.address,
- title=html.escape(title),
- body_style=body_style,
- meta_tags=meta_tags,
- query_string=re.escape(inner_query_string),
- wrapper_key=site.settings["wrapper_key"],
- ajax_key=site.settings["ajax_key"],
- wrapper_nonce=wrapper_nonce,
- postmessage_nonce_security=postmessage_nonce_security,
- permissions=json.dumps(site.settings["permissions"]),
- show_loadingscreen=json.dumps(show_loadingscreen),
- sandbox_permissions=sandbox_permissions,
- rev=config.rev,
- lang=config.language,
- homepage=homepage,
- themeclass=themeclass,
- script_nonce=script_nonce
- )
-
- # Create a new wrapper nonce that allows fetching one html file without the wrapper
- def getWrapperNonce(self):
- wrapper_nonce = CryptHash.random()
- self.server.wrapper_nonces.append(wrapper_nonce)
- return wrapper_nonce
-
- def getScriptNonce(self):
- if not self.script_nonce:
- self.script_nonce = CryptHash.random(encoding="base64")
-
- return self.script_nonce
-
- # Create a new add nonce that allows adding one site via /add/
- def getAddNonce(self):
- add_nonce = CryptHash.random()
- self.server.add_nonces.append(add_nonce)
- return add_nonce
-
- def isSameOrigin(self, url_a, url_b):
- if not url_a or not url_b:
- return False
-
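- # /raw/ serves the same files without the wrapper; normalize it away so
- # both forms compare as the same origin.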
- url_a = url_a.replace("/raw/", "/")
- url_b = url_b.replace("/raw/", "/")
-
- origin_pattern = "http[s]{0,1}://(.*?/.*?/).*"
- is_origin_full = re.match(origin_pattern, url_a)
- if not is_origin_full: # Origin looks trimmed to host, require only same host
- origin_pattern = "http[s]{0,1}://(.*?/).*"
-
- origin_a = re.sub(origin_pattern, "\\1", url_a)
- origin_b = re.sub(origin_pattern, "\\1", url_b)
-
- return origin_a == origin_b
-
- # Return {address: 1Site.., inner_path: /data/users.json} from url path
- def parsePath(self, path):
- path = path.replace("\\", "/")
- path = path.replace("/index.html/", "/") # Base Backward compatibility fix
- if path.endswith("/"):
- path = path + "index.html"
-
- if "../" in path or "./" in path:
- raise SecurityError("Invalid path")
-
- match = re.match(r"/media/(?P[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P/.*|$)", path)
- if match:
- path_parts = match.groupdict()
- if self.isDomain(path_parts["address"]):
- path_parts["address"] = self.resolveDomain(path_parts["address"])
- path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites)
- path_parts["inner_path"] = path_parts["inner_path"].lstrip("/")
- if not path_parts["inner_path"]:
- path_parts["inner_path"] = "index.html"
- return path_parts
- else:
- return None
-
- # Serve a media for site
- def actionSiteMedia(self, path, header_length=True, header_noscript=False):
- try:
- path_parts = self.parsePath(path)
- except SecurityError as err:
- return self.error403(err)
-
- if not path_parts:
- return self.error404(path)
-
- address = path_parts["address"]
-
- file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"])
-
- if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."):
- # If debugging merge *.css to all.css and *.js to all.js
- site = self.server.sites.get(address)
- if site and site.settings["own"]:
- from Debug import DebugMedia
- DebugMedia.merge(file_path)
-
- if not address or address == ".":
- return self.error403(path_parts["inner_path"])
-
- header_allow_ajax = False
- if self.get.get("ajax_key"):
- site = SiteManager.site_manager.get(path_parts["request_address"])
- if self.get["ajax_key"] == site.settings["ajax_key"]:
- header_allow_ajax = True
- else:
- return self.error403("Invalid ajax_key")
-
- file_size = helper.getFilesize(file_path)
-
- if file_size is not None:
- return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts)
-
- elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect
- if path_parts["inner_path"]:
- return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1])
- else:
- return self.actionRedirect("./%s/" % path_parts["address"])
-
- else: # File not exists, try to download
- if address not in SiteManager.site_manager.sites: # Site not added yet: prompt the user before downloading
- return self.actionSiteAddPrompt(path)
-
- site = SiteManager.site_manager.need(address)
-
- if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites
- return self.actionFile("src/Ui/media/img/favicon.ico")
-
- result = site.needFile(path_parts["inner_path"], priority=15) # Wait until file downloads
- if result:
- file_size = helper.getFilesize(file_path)
- return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts)
- else:
- self.log.debug("File not found: %s" % path_parts["inner_path"])
- return self.error404(path)
-
- # Serve a media for ui
- def actionUiMedia(self, path):
- match = re.match("/uimedia/(?P.*)", path)
- if match: # Looks like a valid path
- file_path = "src/Ui/media/%s" % match.group("inner_path")
- allowed_dir = os.path.abspath("src/Ui/media") # Only files within src/Ui/media allowed
- if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir):
- # File not in allowed path
- return self.error403()
- else:
- if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."):
- # If debugging merge *.css to all.css and *.js to all.js
- from Debug import DebugMedia
- DebugMedia.merge(file_path)
- return self.actionFile(file_path, header_length=False) # Don't send Content-Length to allow plugins to append content
-
- else: # Bad url
- return self.error400()
-
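- # Handle the /add/ form post: the add_nonce must match one minted by
- # actionSiteAddPrompt, and each nonce is consumed on first use.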
- def actionSiteAdd(self):
- post_data = self.env["wsgi.input"].read().decode()
- post = dict(urllib.parse.parse_qsl(post_data))
- if post["add_nonce"] not in self.server.add_nonces:
- return self.error403("Add nonce error.")
- self.server.add_nonces.remove(post["add_nonce"])
- SiteManager.site_manager.need(post["address"])
- return self.actionRedirect(post["url"])
-
- @helper.encodeResponse
- def actionSiteAddPrompt(self, path):
- path_parts = self.parsePath(path)
- if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]):
- return self.error404(path)
-
- self.sendHeader(200, "text/html", noscript=True)
- template = open("src/Ui/template/site_add.html").read()
- template = template.replace("{url}", html.escape(self.env["PATH_INFO"]))
- template = template.replace("{address}", path_parts["address"])
- template = template.replace("{add_nonce}", self.getAddNonce())
- return template
-
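- # Fill the {themeclass} and {site_modified} placeholders in html blocks
- # streamed by actionFile.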
- def replaceHtmlVariables(self, block, path_parts):
- user = self.getCurrentUser()
- themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light"))
- block = block.replace(b"{themeclass}", themeclass.encode("utf8"))
-
- if path_parts:
- site = self.server.sites.get(path_parts.get("address"))
- if site.settings["own"]:
- modified = int(time.time())
- else:
- modified = int(site.content_manager.contents["content.json"]["modified"])
- block = block.replace(b"{site_modified}", str(modified).encode("utf8"))
-
- return block
-
- # Stream a file to client
- def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers={}, file_size=None, file_obj=None, path_parts=None):
- file_name = os.path.basename(file_path)
-
- if file_size is None:
- file_size = helper.getFilesize(file_path)
-
- if file_size is not None:
- # Try to figure out content type by extension
- content_type = self.getContentType(file_name)
-
- range = self.env.get("HTTP_RANGE")
- range_start = None
-
- is_html_file = file_name.endswith(".html")
- if is_html_file:
- header_length = False
-
- if send_header:
- extra_headers = extra_headers.copy()
- content_encoding = self.get.get("zeronet_content_encoding", "")
- if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")):
- extra_headers["Content-Encoding"] = content_encoding
- extra_headers["Accept-Ranges"] = "bytes"
- if header_length:
- extra_headers["Content-Length"] = str(file_size)
- if range:
- range_start = int(re.match(".*?([0-9]+)", range).group(1))
- if re.match(".*?-([0-9]+)", range):
- range_end = int(re.match(".*?-([0-9]+)", range).group(1)) + 1
- else:
- range_end = file_size
- extra_headers["Content-Length"] = str(range_end - range_start)
- extra_headers["Content-Range"] = "bytes %s-%s/%s" % (range_start, range_end - 1, file_size)
- if range:
- status = 206
- else:
- status = 200
- self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers)
- if self.env["REQUEST_METHOD"] != "OPTIONS":
- if not file_obj:
- file_obj = open(file_path, "rb")
-
- if range_start:
- file_obj.seek(range_start)
- while 1: # Stream the file in blocks; close and stop at EOF
- block = file_obj.read(block_size)
- if is_html_file:
- block = self.replaceHtmlVariables(block, path_parts)
- if block:
- yield block
- else:
- file_obj.close()
- break
- else: # File not exists
- for part in self.error404(str(file_path)):
- yield part
-
- # On websocket connection
- def actionWebsocket(self):
- ws = self.env.get("wsgi.websocket")
-
- if ws:
- # Allow only same-origin websocket requests
- origin = self.env.get("HTTP_ORIGIN")
- host = self.env.get("HTTP_HOST")
- if origin:
- origin_host = origin.split("://", 1)[-1]
- if origin_host != host and origin_host not in self.server.allowed_ws_origins:
- error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins)
- ws.send(json.dumps({"error": error_message}))
- return self.error403(error_message)
-
- # Find site by wrapper_key
- wrapper_key = self.get["wrapper_key"]
- site = None
- for site_check in list(self.server.sites.values()):
- if site_check.settings["wrapper_key"] == wrapper_key:
- site = site_check
-
- if site: # Correct wrapper key
- try:
- user = self.getCurrentUser()
- except Exception as err:
- ws.send(json.dumps({"error": "Error in data/user.json: %s" % err}))
- return self.error500("Error in data/user.json: %s" % err)
- if not user:
- ws.send(json.dumps({"error": "No user found"}))
- return self.error403("No user found")
- ui_websocket = UiWebsocket(ws, site, self.server, user, self)
- site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events
- self.server.websockets.append(ui_websocket)
- ui_websocket.start()
- self.server.websockets.remove(ui_websocket)
- for site_check in list(self.server.sites.values()):
- # Remove websocket from every site (admin sites allowed to join other sites event channels)
- if ui_websocket in site_check.websockets:
- site_check.websockets.remove(ui_websocket)
- return [b"Bye."]
- else: # No site found by wrapper key
- ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key}))
- return self.error403("Wrapper key not found: %s" % wrapper_key)
- else:
- self.start_response("400 Bad Request", [])
- return [b"Not a websocket request!"]
-
- # Debug last error
- def actionDebug(self):
- # Raise last error from DebugHook
- import main
- last_error = main.DebugHook.last_error
- if last_error:
- raise last_error[0](last_error[1]).with_traceback(last_error[2])
- else:
- self.sendHeader()
- return [b"No error! :)"]
-
- # Just raise an error to get console
- def actionConsole(self):
- import sys
- sites = self.server.sites
- main = sys.modules["main"]
-
- def bench(code, times=100, init=None):
- sites = self.server.sites
- main = sys.modules["main"]
- s = time.time()
- if init:
- eval(compile(init, '', 'exec'), globals(), locals())
- for _ in range(times):
- back = eval(code, globals(), locals())
- return ["%s run: %.3fs" % (times, time.time() - s), back]
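- # Hypothetical console usage (bench() is real, the call is made up):
- # bench("len(sites)", times=1000) -> ["1000 run: 0.002s", <last result>]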
- raise Exception("Here is your console")
-
- # - Tests -
-
- def actionTestStream(self):
- self.sendHeader()
- yield " " * 1080 # Overflow browser's buffer
- yield "He"
- time.sleep(1)
- yield "llo!"
- # yield "Running websockets: %s" % len(self.server.websockets)
- # self.server.sendMessage("Hello!")
-
- # - Errors -
-
- # Send bad request error
- def error400(self, message=""):
- self.sendHeader(400, noscript=True)
- self.log.error("Error 400: %s" % message)
- return self.formatError("Bad Request", message)
-
- # You are not allowed to access this
- def error403(self, message="", details=True):
- self.sendHeader(403, noscript=True)
- self.log.warning("Error 403: %s" % message)
- return self.formatError("Forbidden", message, details=details)
-
- # Send file not found error
- def error404(self, path=""):
- self.sendHeader(404, noscript=True)
- return self.formatError("Not Found", path, details=False)
-
- # Internal server error
- def error500(self, message=":("):
- self.sendHeader(500, noscript=True)
- self.log.error("Error 500: %s" % message)
- return self.formatError("Server error", message)
-
- @helper.encodeResponse
- def formatError(self, title, message, details=True):
- import sys
- import gevent
-
- if details and config.debug:
- details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key}
- details["version_zeronet"] = "%s r%s" % (config.version, config.rev)
- details["version_python"] = sys.version
- details["version_gevent"] = gevent.__version__
- details["plugins"] = PluginManager.plugin_manager.plugin_names
- arguments = {key: val for key, val in vars(config.arguments).items() if "password" not in key}
- details["arguments"] = arguments
- return """
-
- %s
- %s
- Please report it if you think this is an error.
- Details:
- %s
- """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True)))
- else:
- return """
-
- %s
- %s
- """ % (title, html.escape(message))
diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py
deleted file mode 100644
index 9d93ccfd..00000000
--- a/src/Ui/UiServer.py
+++ /dev/null
@@ -1,206 +0,0 @@
-import logging
-import time
-import urllib
-import socket
-import gevent
-
-from gevent.pywsgi import WSGIServer
-from lib.gevent_ws import WebSocketHandler
-
-from .UiRequest import UiRequest
-from Site import SiteManager
-from Config import config
-from Debug import Debug
-import importlib
-
-
-# Skip websocket handler if not necessary
-class UiWSGIHandler(WebSocketHandler):
-
- def __init__(self, *args, **kwargs):
- self.server = args[2]
- super(UiWSGIHandler, self).__init__(*args, **kwargs)
- self.args = args
- self.kwargs = kwargs
-
- def handleError(self, err):
- if config.debug: # Allow websocket errors to appear on /Debug
- import main
- main.DebugHook.handleError()
- else:
- ui_request = UiRequest(self.server, {}, self.environ, self.start_response)
- block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err))
- for block in block_gen:
- self.write(block)
-
- def run_application(self):
- err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler"
- try:
- super(UiWSGIHandler, self).run_application()
- except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err:
- logging.warning("%s connection error: %s" % (err_name, err))
- except Exception as err:
- logging.warning("%s error: %s" % (err_name, Debug.formatException(err)))
- self.handleError(err)
-
- def handle(self):
- # Save socket to be able to close them properly on exit
- self.server.sockets[self.client_address] = self.socket
- super(UiWSGIHandler, self).handle()
- del self.server.sockets[self.client_address]
-
-
-class UiServer:
- def __init__(self):
- self.ip = config.ui_ip
- self.port = config.ui_port
- self.running = False
- if self.ip == "*":
- self.ip = "0.0.0.0" # Bind all
- if config.ui_host:
- self.allowed_hosts = set(config.ui_host)
- elif config.ui_ip == "127.0.0.1":
- # IP Addresses are inherently allowed as they are immune to DNS
- # rebinding attacks.
- self.allowed_hosts = set(["zero", "localhost:%s" % config.ui_port])
- # "URI producers and normalizers should omit the port component and
- # its ':' delimiter if port is empty or if its value would be the
- # same as that of the scheme's default."
- # Source: https://tools.ietf.org/html/rfc3986#section-3.2.3
- # As a result, we need to support portless hosts if port 80 is in
- # use.
- if config.ui_port == 80:
- self.allowed_hosts.update(["localhost"])
- else:
- self.allowed_hosts = set([])
- self.allowed_ws_origins = set()
- self.allow_trans_proxy = config.ui_trans_proxy
-
- self.wrapper_nonces = []
- self.add_nonces = []
- self.websockets = []
- self.site_manager = SiteManager.site_manager
- self.sites = SiteManager.site_manager.list()
- self.log = logging.getLogger(__name__)
- config.error_logger.onNewRecord = self.handleErrorLogRecord
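- # Sketch of how allowed_hosts is meant to be consumed (an assumption;
- # the check itself lives in UiRequest): a request is served only when
- # its Host header is in self.allowed_hosts, e.g. "localhost:43110";
- # with ui_port == 80 the portless "localhost" form is accepted too.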
-
- def handleErrorLogRecord(self, record):
- self.updateWebsocket(log_event=record.levelname)
-
- # After WebUI started
- def afterStarted(self):
- from util import Platform
- Platform.setMaxfilesopened(config.max_files_opened)
-
- # Handle WSGI request
- def handleRequest(self, env, start_response):
- path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8")
- if env.get("QUERY_STRING"):
- get = dict(urllib.parse.parse_qsl(env['QUERY_STRING']))
- else:
- get = {}
- ui_request = UiRequest(self, get, env, start_response)
- if config.debug: # Let the exception be caught by Werkzeug
- return ui_request.route(path)
- else: # Catch and display the error
- try:
- return ui_request.route(path)
- except Exception as err:
- logging.debug("UiRequest error: %s" % Debug.formatException(err))
- return ui_request.error500("Err: %s" % Debug.formatException(err))
-
- # Reload the UiRequest class to prevent restarts in debug mode
- def reload(self):
- global UiRequest
- import sys
- import importlib.util
- importlib.reload(sys.modules["User.UserManager"])
- importlib.reload(sys.modules["Ui.UiWebsocket"])
- # imp.load_source is deprecated and removed in Python 3.12; use importlib
- spec = importlib.util.spec_from_file_location("UiRequest", "src/Ui/UiRequest.py")
- module = importlib.util.module_from_spec(spec)
- spec.loader.exec_module(module)
- UiRequest = module.UiRequest
- # UiRequest.reload()
-
- # Bind and run the server
- def start(self):
- self.running = True
- handler = self.handleRequest
-
- if config.debug:
- # Auto reload UiRequest on change
- from Debug import DebugReloader
- DebugReloader.watcher.addCallback(self.reload)
-
- # Werkzeug Debugger
- try:
- from werkzeug.debug import DebuggedApplication
- handler = DebuggedApplication(self.handleRequest, evalex=True)
- except Exception as err:
- self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err)
- from Debug import DebugReloader
- self.log.write = lambda msg: self.log.debug(msg.strip()) # For WSGI access.log
- self.log.info("--------------------------------------")
- if ":" in config.ui_ip:
- self.log.info("Web interface: http://[%s]:%s/" % (config.ui_ip, config.ui_port))
- else:
- self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port))
- self.log.info("--------------------------------------")
-
- if config.open_browser and config.open_browser != "False":
- logging.info("Opening browser: %s...", config.open_browser)
- import webbrowser
- try:
- if config.open_browser == "default_browser":
- browser = webbrowser.get()
- else:
- browser = webbrowser.get(config.open_browser)
- url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage)
- gevent.spawn_later(0.3, browser.open, url, new=2)
- except Exception as err:
- print("Error starting browser: %s" % err)
-
- self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log)
- self.server.sockets = {}
- self.afterStarted()
- try:
- self.server.serve_forever()
- except Exception as err:
- self.log.error("Web interface bind error, must be running already, exiting.... %s" % err)
- import main
- main.file_server.stop()
- self.log.debug("Stopped.")
-
- def stop(self):
- self.log.debug("Stopping...")
- # Close WS sockets
- if "clients" in dir(self.server):
- for client in list(self.server.clients.values()):
- client.ws.close()
- # Close http sockets
- sock_closed = 0
- for sock in list(self.server.sockets.values()):
- try:
- sock.send(b"bye")
- sock.shutdown(socket.SHUT_RDWR)
- # sock._sock.close()
- # sock.close()
- sock_closed += 1
- except Exception as err:
- self.log.debug("Http connection close error: %s" % err)
- self.log.debug("Socket closed: %s" % sock_closed)
- time.sleep(0.1)
- if config.debug:
- from Debug import DebugReloader
- DebugReloader.watcher.stop()
-
- self.server.socket.close()
- self.server.stop()
- self.running = False
- time.sleep(1)
-
- def updateWebsocket(self, **kwargs):
- if kwargs:
- param = {"event": list(kwargs.items())[0]}
- else:
- param = None
-
- for ws in self.websockets:
- ws.event("serverChanged", param)
diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py
deleted file mode 100644
index 88e395d6..00000000
--- a/src/Ui/UiWebsocket.py
+++ /dev/null
@@ -1,1270 +0,0 @@
-import json
-import time
-import sys
-import os
-import shutil
-import re
-import copy
-import logging
-import stat
-
-import gevent
-
-from Config import config
-from Site import SiteManager
-from Crypt import CryptBitcoin
-from Debug import Debug
-from util import QueryJson, RateLimit
-from Plugin import PluginManager
-from Translate import translate as _
-from util import helper
-from util import SafeRe
-from util.Flag import flag
-from Content.ContentManager import VerifyError, SignError
-
-
-@PluginManager.acceptPlugins
-class UiWebsocket(object):
- def __init__(self, ws, site, server, user, request):
- self.ws = ws
- self.site = site
- self.user = user
- self.log = site.log
- self.request = request
- self.permissions = []
- self.server = server
- self.next_message_id = 1
- self.waiting_cb = {} # Waiting for callback. Key: message_id, Value: function pointer
- self.channels = [] # Channels joined to
- self.state = {"sending": False} # Shared state of websocket connection
- self.send_queue = [] # Messages to send to client
-
- # Start listener loop
- def start(self):
- ws = self.ws
- if self.site.address == config.homepage and not self.site.page_requested:
- # Add open fileserver port message or closed port error to homepage at first request after start
- self.site.page_requested = True # Don't add the connection notification again
- import main
- file_server = main.file_server
- if not file_server.port_opened or file_server.tor_manager.start_onions is None:
- self.site.page_requested = False # Not ready yet, check next time
- else:
- try:
- self.addHomepageNotifications()
- except Exception as err:
- self.log.error("Uncaught Exception: " + Debug.formatException(err))
-
- for notification in self.site.notifications: # Send pending notification messages
- # send via WebSocket
- self.cmd("notification", notification)
- # just in case, log them to terminal
- if notification[0] == "error":
- self.log.error("\n*** %s\n" % self.dedent(notification[1]))
-
- self.site.notifications = []
-
- while True:
- try:
- if ws.closed:
- break
- else:
- message = ws.receive()
- except Exception as err:
- self.log.error("WebSocket receive error: %s" % Debug.formatException(err))
- break
-
- if message:
- try:
- req = json.loads(message)
- self.handleRequest(req)
- except Exception as err:
- if config.debug: # Allow websocket errors to appear on /Debug
- import main
- main.DebugHook.handleError()
- self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message))
- if not self.hasPlugin("Multiuser"):
- self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html"))
-
- self.onClosed()
-
- def onClosed(self):
- pass
-
- def dedent(self, text):
- return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace(" ", " "))
-
- def addHomepageNotifications(self):
- if not(self.hasPlugin("Multiuser")) and not(self.hasPlugin("UiPassword")):
- bind_ip = getattr(config, "ui_ip", "")
- whitelist = getattr(config, "ui_restrict", [])
- # binds to the Internet, no IP whitelist, no UiPassword, no Multiuser
- if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist):
- self.site.notifications.append([
- "error",
- _("You are not going to set up a public gateway. However, your Web UI is " +
- "open to the whole Internet. " +
- "Please check your configuration.")
- ])
-
- def hasPlugin(self, name):
- return name in PluginManager.plugin_manager.plugin_names
-
- # Has permission to run the command
- def hasCmdPermission(self, cmd):
- flags = flag.db.get(self.getCmdFuncName(cmd), ())
- if "admin" in flags and "ADMIN" not in self.permissions:
- return False
- else:
- return True
-
- # Has permission to access a site
- def hasSitePermission(self, address, cmd=None):
- if address != self.site.address and "ADMIN" not in self.site.settings["permissions"]:
- return False
- else:
- return True
-
- def hasFilePermission(self, inner_path):
- valid_signers = self.site.content_manager.getValidSigners(inner_path)
- return self.site.settings["own"] or self.user.getAuthAddress(self.site.address) in valid_signers
-
- # Event in a channel
- def event(self, channel, *params):
- if channel in self.channels: # We are joined to channel
- if channel == "siteChanged":
- site = params[0]
- site_info = self.formatSiteInfo(site, create_user=False)
- if len(params) > 1 and params[1]: # Extra data
- site_info.update(params[1])
- self.cmd("setSiteInfo", site_info)
- elif channel == "serverChanged":
- server_info = self.formatServerInfo()
- if len(params) > 0 and params[0]: # Extra data
- server_info.update(params[0])
- self.cmd("setServerInfo", server_info)
- elif channel == "announcerChanged":
- site = params[0]
- announcer_info = self.formatAnnouncerInfo(site)
- if len(params) > 1 and params[1]: # Extra data
- announcer_info.update(params[1])
- self.cmd("setAnnouncerInfo", announcer_info)
-
- # Send response to client (to = message.id)
- def response(self, to, result):
- self.send({"cmd": "response", "to": to, "result": result})
-
- # Send a command
- def cmd(self, cmd, params={}, cb=None):
- self.send({"cmd": cmd, "params": params}, cb)
-
- # Encode to json and send message
- def send(self, message, cb=None):
- message["id"] = self.next_message_id # Add message id to allow response
- self.next_message_id += 1
- if cb: # Callback after client responded
- self.waiting_cb[message["id"]] = cb
- self.send_queue.append(message)
- if self.state["sending"]:
- return # Already sending
- try:
- while self.send_queue:
- self.state["sending"] = True
- message = self.send_queue.pop(0)
- self.ws.send(json.dumps(message))
- self.state["sending"] = False
- except Exception as err:
- self.log.debug("Websocket send error: %s" % Debug.formatException(err))
- self.state["sending"] = False
-
- def getPermissions(self, req_id):
- permissions = self.site.settings["permissions"]
- if req_id >= 1000000: # It's a wrapper command, allow admin commands
- permissions = permissions[:]
- permissions.append("ADMIN")
- return permissions
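- # e.g. a page-issued request with id 12 gets the site's own permission
- # list, while a wrapper-issued request with id 1000012 gets the same
- # list extended with "ADMIN".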
-
- def asyncWrapper(self, func):
- def asyncErrorWatcher(func, *args, **kwargs):
- try:
- result = func(*args, **kwargs)
- if result is not None:
- self.response(args[0], result)
- except Exception as err:
- if config.debug: # Allow websocket errors to appear on /Debug
- import main
- main.DebugHook.handleError()
- self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err))
- self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html"))
-
- def wrapper(*args, **kwargs):
- gevent.spawn(asyncErrorWatcher, func, *args, **kwargs)
- return wrapper
-
- def getCmdFuncName(self, cmd):
- func_name = "action" + cmd[0].upper() + cmd[1:]
- return func_name
-
- # Handle incoming messages
- def handleRequest(self, req):
-
- cmd = req.get("cmd")
- params = req.get("params")
- self.permissions = self.getPermissions(req["id"])
-
- if cmd == "response": # It's a response to a command
- return self.actionResponse(req["to"], req["result"])
- else: # Normal command
- func_name = self.getCmdFuncName(cmd)
- func = getattr(self, func_name, None)
- if self.site.settings.get("deleting"):
- return self.response(req["id"], {"error": "Site is deleting"})
-
- if not func: # Unknown command
- return self.response(req["id"], {"error": "Unknown command: %s" % cmd})
-
- if not self.hasCmdPermission(cmd): # Admin commands
- return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd})
-
- # Execute in parallel
- func_flags = flag.db.get(self.getCmdFuncName(cmd), ())
- if func_flags and "async_run" in func_flags:
- func = self.asyncWrapper(func)
-
- # Support calling as named, unnamed parameters and raw first argument too
- if type(params) is dict:
- result = func(req["id"], **params)
- elif type(params) is list:
- result = func(req["id"], *params)
- elif params:
- result = func(req["id"], params)
- else:
- result = func(req["id"])
-
- if result is not None:
- self.response(req["id"], result)
-
- # Format site info
- def formatSiteInfo(self, site, create_user=True):
- content = site.content_manager.contents.get("content.json", {})
- if content: # Remove unnecessary data transfer
- content = content.copy()
- content["files"] = len(content.get("files", {}))
- content["files_optional"] = len(content.get("files_optional", {}))
- content["includes"] = len(content.get("includes", {}))
- if "sign" in content:
- del(content["sign"])
- if "signs" in content:
- del(content["signs"])
- if "signers_sign" in content:
- del(content["signers_sign"])
-
- settings = site.settings.copy()
- del settings["wrapper_key"] # Dont expose wrapper key
-
- ret = {
- "auth_address": self.user.getAuthAddress(site.address, create=create_user),
- "cert_user_id": self.user.getCertUserId(site.address),
- "address": site.address,
- "address_short": site.address_short,
- "address_hash": site.address_hash.hex(),
- "settings": settings,
- "content_updated": site.content_updated,
- "bad_files": len(site.bad_files),
- "size_limit": site.getSizeLimit(),
- "next_size_limit": site.getNextSizeLimit(),
- "peers": max(site.settings.get("peers", 0), len(site.peers)),
- "started_task_num": site.worker_manager.started_task_num,
- "tasks": len(site.worker_manager.tasks),
- "workers": len(site.worker_manager.workers),
- "content": content
- }
- if site.settings["own"]:
- ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey"))
- if site.isServing() and content:
- ret["peers"] += 1 # Add myself if serving
- return ret
-
- def formatServerInfo(self):
- import main
- file_server = main.file_server
- if file_server.port_opened == {}:
- ip_external = None
- else:
- ip_external = any(file_server.port_opened.values())
- back = {
- "ip_external": ip_external,
- "port_opened": file_server.port_opened,
- "platform": sys.platform,
- "fileserver_ip": config.fileserver_ip,
- "fileserver_port": config.fileserver_port,
- "tor_enabled": file_server.tor_manager.enabled,
- "tor_status": file_server.tor_manager.status,
- "tor_has_meek_bridges": file_server.tor_manager.has_meek_bridges,
- "tor_use_bridges": config.tor_use_bridges,
- "ui_ip": config.ui_ip,
- "ui_port": config.ui_port,
- "version": config.version,
- "rev": config.rev,
- "timecorrection": file_server.timecorrection,
- "language": config.language,
- "debug": config.debug,
- "offline": config.offline,
- "plugins": PluginManager.plugin_manager.plugin_names,
- "plugins_rev": PluginManager.plugin_manager.plugins_rev,
- "user_settings": self.user.settings
- }
- if "ADMIN" in self.site.settings["permissions"]:
- back["updatesite"] = config.updatesite
- back["dist_type"] = config.dist_type
- back["lib_verify_best"] = CryptBitcoin.lib_verify_best
- return back
-
- def formatAnnouncerInfo(self, site):
- return {"address": site.address, "stats": site.announcer.stats}
-
- # - Actions -
-
- def actionAs(self, to, address, cmd, params=[]):
- if not self.hasSitePermission(address, cmd=cmd):
- return self.response(to, "No permission for site %s" % address)
- req_self = copy.copy(self)
- req_self.site = self.server.sites.get(address)
- req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site
- req_obj = super(UiWebsocket, req_self)
- req = {"id": to, "cmd": cmd, "params": params}
- req_obj.handleRequest(req)
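- # Illustrative call (sketch, the address is a placeholder):
- # {"cmd": "as", "params": ["<site address>", "siteInfo", []]} runs
- # actionSiteInfo against that site while keeping this connection's
- # permission checks.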
-
- # Do callback on response {"cmd": "response", "to": message_id, "result": result}
- def actionResponse(self, to, result):
- if to in self.waiting_cb:
- self.waiting_cb[to](result) # Call callback function
- else:
- self.log.error("Websocket callback not found: %s, %s" % (to, result))
-
- # Send a simple pong answer
- def actionPing(self, to):
- self.response(to, "pong")
-
- # Send site details
- def actionSiteInfo(self, to, file_status=None):
- ret = self.formatSiteInfo(self.site)
- if file_status: # Client queries file status
- if self.site.storage.isFile(file_status): # File exist, add event done
- ret["event"] = ("file_done", file_status)
- self.response(to, ret)
-
- def actionSiteBadFiles(self, to):
- return list(self.site.bad_files.keys())
-
- # Join to an event channel
- def actionChannelJoin(self, to, channels):
- if not isinstance(channels, list):
- channels = [channels]
-
- for channel in channels:
- if channel not in self.channels:
- self.channels.append(channel)
-
- self.response(to, "ok")
-
- # Server variables
- def actionServerInfo(self, to):
- back = self.formatServerInfo()
- self.response(to, back)
-
- # Create a new wrapper nonce that allows loading an html file
- @flag.admin
- def actionServerGetWrapperNonce(self, to):
- wrapper_nonce = self.request.getWrapperNonce()
- self.response(to, wrapper_nonce)
-
- def actionAnnouncerInfo(self, to):
- back = self.formatAnnouncerInfo(self.site)
- self.response(to, back)
-
- @flag.admin
- def actionAnnouncerStats(self, to):
- back = {}
- trackers = self.site.announcer.getTrackers()
- for site in list(self.server.sites.values()):
- for tracker, stats in site.announcer.stats.items():
- if tracker not in trackers:
- continue
- if tracker not in back:
- back[tracker] = {}
- is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"])
- for key, val in stats.items():
- if key.startswith("num_"):
- back[tracker][key] = back[tracker].get(key, 0) + val
- elif is_latest_data:
- back[tracker][key] = val
-
- return back
-
- # Sign content.json
- def actionSiteSign(self, to, privatekey=None, inner_path="content.json", remove_missing_optional=False, update_changed_files=False, response_ok=True):
- self.log.debug("Signing: %s" % inner_path)
- site = self.site
- extend = {} # Extended info for signing
-
- # Change to the file's content.json
- file_info = site.content_manager.getFileInfo(inner_path)
- if not inner_path.endswith("content.json"):
- if not file_info:
- raise Exception("Invalid content.json file: %s" % inner_path)
- inner_path = file_info["content_inner_path"]
-
- # Add certificate to user files
- is_user_content = file_info and ("cert_signers" in file_info or "cert_signers_pattern" in file_info)
- if is_user_content and privatekey is None:
- cert = self.user.getCert(self.site.address)
- extend["cert_auth_type"] = cert["auth_type"]
- extend["cert_user_id"] = self.user.getCertUserId(site.address)
- extend["cert_sign"] = cert["cert_sign"]
- self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"])
-
- if not self.hasFilePermission(inner_path):
- self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.")
- return self.response(to, {"error": "Forbidden, you can only modify your own sites"})
-
- if privatekey == "stored": # Get privatekey from sites.json
- privatekey = self.user.getSiteData(self.site.address).get("privatekey")
- if not privatekey:
- self.cmd("notification", ["error", _["Content signing failed"] + "Private key not found in sites.json "])
- self.response(to, {"error": "Site sign failed: Private key not stored."})
- self.log.error("Site sign failed: %s: Private key not stored in sites.json" % inner_path)
- return
- if not privatekey: # Get privatekey from users.json auth_address
- privatekey = self.user.getAuthPrivatekey(self.site.address)
-
- # Signing
- # Reload content.json, ignore errors to make it up-to-date
- site.content_manager.loadContent(inner_path, add_bad_files=False, force=True)
- # Sign using private key sent by user
- try:
- site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files, remove_missing_optional=remove_missing_optional)
- except (VerifyError, SignError) as err:
- self.cmd("notification", ["error", _["Content signing failed"] + "%s " % err])
- self.response(to, {"error": "Site sign failed: %s" % err})
- self.log.error("Site sign failed: %s: %s" % (inner_path, Debug.formatException(err)))
- return
- except Exception as err:
- self.cmd("notification", ["error", _["Content signing error"] + "%s " % Debug.formatException(err)])
- self.response(to, {"error": "Site sign error: %s" % Debug.formatException(err)})
- self.log.error("Site sign error: %s: %s" % (inner_path, Debug.formatException(err)))
- return
-
- site.content_manager.loadContent(inner_path, add_bad_files=False) # Load new content.json, ignore errors
-
- if update_changed_files:
- self.site.updateWebsocket(file_done=inner_path)
-
- if response_ok:
- self.response(to, "ok")
- else:
- return inner_path
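- # Illustrative client call (sketch): {"cmd": "siteSign", "params":
- # {"inner_path": "content.json"}} resolves the private key as above
- # (argument, sites.json when "stored", else the user's auth key) and
- # answers "ok" on success.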
-
- # Sign and publish content.json
- def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True, remove_missing_optional=False, update_changed_files=False):
- if sign:
- inner_path = self.actionSiteSign(
- to, privatekey, inner_path, response_ok=False,
- remove_missing_optional=remove_missing_optional, update_changed_files=update_changed_files
- )
- if not inner_path:
- return
- # Publishing
- if not self.site.settings["serving"]: # Enable site if paused
- self.site.settings["serving"] = True
- self.site.saveSettings()
- self.site.announce()
-
- if inner_path not in self.site.content_manager.contents:
- return self.response(to, {"error": "File %s not found" % inner_path})
-
- event_name = "publish %s %s" % (self.site.address, inner_path)
- called_instantly = RateLimit.isAllowed(event_name, 30)
- thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, inner_path) # Only publish once in 30 seconds
- notification = "linked" not in dir(thread) # Only display notification on first callback
- thread.linked = True
- if called_instantly: # Allowed to call instantly
- # At the end callback with request id and thread
- self.cmd("progress", ["publish", _["Content published to {0}/{1} peers."].format(0, 5), 0])
- thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=notification))
- else:
- self.cmd(
- "notification",
- ["info", _["Content publish queued for {0:.0f} seconds."].format(RateLimit.delayLeft(event_name, 30)), 5000]
- )
- self.response(to, "ok")
- # At the end display notification
- thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=False))
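- # Net effect of the RateLimit guard (sketch): the first publish of a
- # site/path pair runs immediately with a progress bar; repeats within
- # 30 seconds are coalesced into a single delayed publish and the caller
- # is told it was queued.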
-
- def doSitePublish(self, site, inner_path):
- def cbProgress(published, limit):
- progress = int(float(published) / limit * 100)
- self.cmd("progress", [
- "publish",
- _["Content published to {0}/{1} peers."].format(published, limit),
- progress
- ])
- diffs = site.content_manager.getDiffs(inner_path)
- back = site.publish(limit=5, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress)
- if back == 0: # Failed to publish to anyone
- self.cmd("progress", ["publish", _["Content publish failed."], -100])
- else:
- cbProgress(back, back)
- return back
-
- # Callback of site publish
- def cbSitePublish(self, to, site, thread, notification=True, callback=True):
- published = thread.value
- if published > 0: # Successfully published
- if notification:
- # self.cmd("notification", ["done", _["Content published to {0} peers."].format(published), 5000])
- site.updateWebsocket() # Send updated site data to local websocket clients
- if callback:
- self.response(to, "ok")
- else:
- if len(site.peers) == 0:
- import main
- if any(main.file_server.port_opened.values()) or main.file_server.tor_manager.start_onions:
- if notification:
- self.cmd("notification", ["info", _["No peers found, but your content is ready to access."]])
- if callback:
- self.response(to, "ok")
- else:
- if notification:
- self.cmd("notification", [
- "info",
- _("""{_[Your network connection is restricted. Please, open {0} port]}
- {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port)
- ])
- if callback:
- self.response(to, {"error": "Port not opened."})
-
- else:
- if notification:
- self.response(to, {"error": "Content publish failed."})
-
- def actionSiteReload(self, to, inner_path):
- self.site.content_manager.loadContent(inner_path, add_bad_files=False)
- self.site.storage.verifyFiles(quick_check=True)
- self.site.updateWebsocket()
- return "ok"
-
- # Write a file to disk
- def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False):
- valid_signers = self.site.content_manager.getValidSigners(inner_path)
- auth_address = self.user.getAuthAddress(self.site.address)
- if not self.hasFilePermission(inner_path):
- self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers))
- return self.response(to, {"error": "Forbidden, you can only modify your own files"})
-
- # Try not to overwrite files currently in sync
- content_inner_path = re.sub("^(.*)/.*?$", "\\1/content.json", inner_path) # Also check the content.json from same directory
- if (self.site.bad_files.get(inner_path) or self.site.bad_files.get(content_inner_path)) and not ignore_bad_files:
- found = self.site.needFile(inner_path, update=True, priority=10)
- if not found:
- self.cmd(
- "confirm",
- [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]],
- lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True)
- )
- return False
-
- try:
- import base64
- content = base64.b64decode(content_base64)
- # Save old file to generate patch later
- if (
- inner_path.endswith(".json") and not inner_path.endswith("content.json") and
- self.site.storage.isFile(inner_path) and not self.site.storage.isFile(inner_path + "-old")
- ):
- try:
- self.site.storage.rename(inner_path, inner_path + "-old")
- except Exception:
- # Rename failed, fall back to copying the contents
- with self.site.storage.open(inner_path, "rb") as f_old:
- with self.site.storage.open(inner_path + "-old", "wb") as f_new:
- shutil.copyfileobj(f_old, f_new) # with-blocks also close the handles
-
- self.site.storage.write(inner_path, content)
- except Exception as err:
- self.log.error("File write error: %s" % Debug.formatException(err))
- return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)})
-
- if inner_path.endswith("content.json"):
- self.site.content_manager.loadContent(inner_path, add_bad_files=False, force=True)
-
- self.response(to, "ok")
-
- # Send sitechanged to other local users
- for ws in self.site.websockets:
- if ws != self:
- ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]})
-
- def actionFileDelete(self, to, inner_path):
- if not self.hasFilePermission(inner_path):
- self.log.error("File delete error: you don't own this site & you are not approved by the owner.")
- return self.response(to, {"error": "Forbidden, you can only modify your own files"})
-
- need_delete = True
- file_info = self.site.content_manager.getFileInfo(inner_path)
- if file_info and file_info.get("optional"):
- # Non-existing optional files won't be removed from content.json, so we have to do it manually
- self.log.debug("Deleting optional file: %s" % inner_path)
- relative_path = file_info["relative_path"]
- content_json = self.site.storage.loadJson(file_info["content_inner_path"])
- if relative_path in content_json.get("files_optional", {}):
- del content_json["files_optional"][relative_path]
- self.site.storage.writeJson(file_info["content_inner_path"], content_json)
- self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
- need_delete = self.site.storage.isFile(inner_path) # File still exists after removing from content.json (owned site)
-
- if need_delete:
- try:
- self.site.storage.delete(inner_path)
- except Exception as err:
- self.log.error("File delete error: %s" % err)
- return self.response(to, {"error": "Delete error: %s" % Debug.formatExceptionMessage(err)})
-
- self.response(to, "ok")
-
- # Send sitechanged to other local users
- for ws in self.site.websockets:
- if ws != self:
- ws.event("siteChanged", self.site, {"event": ["file_deleted", inner_path]})
-
- # Find data in json files
- def actionFileQuery(self, to, dir_inner_path, query=None):
- # s = time.time()
- dir_path = self.site.storage.getPath(dir_inner_path)
- rows = list(QueryJson.query(dir_path, query or ""))
- # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s))
- return self.response(to, rows)
-
- # List files in directory
- @flag.async_run
- def actionFileList(self, to, inner_path):
- try:
- return list(self.site.storage.walk(inner_path))
- except Exception as err:
- self.log.error("fileList %s error: %s" % (inner_path, Debug.formatException(err)))
- return {"error": Debug.formatExceptionMessage(err)}
-
- # List directories in a directory
- @flag.async_run
- def actionDirList(self, to, inner_path, stats=False):
- try:
- if stats:
- back = []
- for file_name in self.site.storage.list(inner_path):
- file_stats = os.stat(self.site.storage.getPath(inner_path + "/" + file_name))
- is_dir = stat.S_ISDIR(file_stats.st_mode)
- back.append(
- {"name": file_name, "size": file_stats.st_size, "is_dir": is_dir}
- )
- return back
- else:
- return list(self.site.storage.list(inner_path))
- except Exception as err:
- self.log.error("dirList %s error: %s" % (inner_path, Debug.formatException(err)))
- return {"error": Debug.formatExceptionMessage(err)}
-
- # Sql query
- def actionDbQuery(self, to, query, params=None, wait_for=None):
- if config.debug or config.verbose:
- s = time.time()
- rows = []
- try:
- res = self.site.storage.query(query, params)
- except Exception as err: # Response the error to client
- self.log.error("DbQuery error: %s" % Debug.formatException(err))
- return self.response(to, {"error": Debug.formatExceptionMessage(err)})
- # Convert result to dict
- for row in res:
- rows.append(dict(row))
- if config.verbose and time.time() - s > 0.1: # Log slow query
- self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s))
- return self.response(to, rows)
-
- # Return file content
- @flag.async_run
- def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300, priority=6):
- try:
- if required or inner_path in self.site.bad_files:
- with gevent.Timeout(timeout):
- self.site.needFile(inner_path, priority=priority)
- body = self.site.storage.read(inner_path, "rb")
- except (Exception, gevent.Timeout) as err:
- self.log.debug("%s fileGet error: %s" % (inner_path, Debug.formatException(err)))
- body = None
-
- if not body:
- body = None
- elif format == "base64":
- import base64
- body = base64.b64encode(body).decode()
- else:
- try:
- body = body.decode()
- except Exception as err:
- self.response(to, {"error": "Error decoding text: %s" % err})
- self.response(to, body)
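- # Example (sketch): fileGet with format="base64" returns the raw bytes
- # base64-encoded (useful for binary files); the default "text" form
- # UTF-8-decodes them and errors out on non-text content.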
-
- @flag.async_run
- def actionFileNeed(self, to, inner_path, timeout=300, priority=6):
- try:
- with gevent.Timeout(timeout):
- self.site.needFile(inner_path, priority=priority)
- except (Exception, gevent.Timeout) as err:
- return self.response(to, {"error": Debug.formatExceptionMessage(err)})
- return self.response(to, "ok")
-
- def actionFileRules(self, to, inner_path, use_my_cert=False, content=None):
- if not content: # No content defined by function call
- content = self.site.content_manager.contents.get(inner_path)
-
- if not content: # File not created yet
- cert = self.user.getCert(self.site.address)
- if cert and cert["auth_address"] in self.site.content_manager.getValidSigners(inner_path):
- # Current selected cert if valid for this site, add it to query rules
- content = {}
- content["cert_auth_type"] = cert["auth_type"]
- content["cert_user_id"] = self.user.getCertUserId(self.site.address)
- content["cert_sign"] = cert["cert_sign"]
-
- rules = self.site.content_manager.getRules(inner_path, content)
- if inner_path.endswith("content.json") and rules:
- if content:
- rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())])
- else:
- rules["current_size"] = 0
- return self.response(to, rules)
-
- # Add certificate to user
- def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert):
- try:
- res = self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
- if res is True:
- self.cmd(
- "notification",
- ["done", _("{_[New certificate added]:} {auth_type}/{auth_user_name}@{domain} .")]
- )
- self.user.setCert(self.site.address, domain)
- self.site.updateWebsocket(cert_changed=domain)
- self.response(to, "ok")
- elif res is False:
- # Display confirmation of change
- cert_current = self.user.certs[domain]
- body = _("{_[Your current certificate]:} {cert_current[auth_type]}/{cert_current[auth_user_name]}@{domain} ")
- self.cmd(
- "confirm",
- [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")],
- lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert)
- )
- else:
- self.response(to, "Not changed")
- except Exception as err:
- self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err)))
- self.response(to, {"error": err.message})
-
- def cbCertAddConfirm(self, to, domain, auth_type, auth_user_name, cert):
- self.user.deleteCert(domain)
- self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert)
- self.cmd(
- "notification",
- ["done", _("Certificate changed to: {auth_type}/{auth_user_name}@{domain} .")]
- )
- self.user.setCert(self.site.address, domain)
- self.site.updateWebsocket(cert_changed=domain)
- self.response(to, "ok")
-
- # Select certificate for site
- def actionCertSelect(self, to, accepted_domains=[], accept_any=False, accepted_pattern=None):
- accounts = []
- accounts.append(["", _["No certificate"], ""]) # Default option
- active = "" # Make it active if no other option found
-
- # Add my certs
- auth_address = self.user.getAuthAddress(self.site.address) # Current auth address
- site_data = self.user.getSiteData(self.site.address) # Current auth address
-
- if not accepted_domains and not accepted_pattern: # Accept any if no filter defined
- accept_any = True
-
- for domain, cert in list(self.user.certs.items()):
- if auth_address == cert["auth_address"] and domain == site_data.get("cert"):
- active = domain
- title = cert["auth_user_name"] + "@" + domain
- accepted_pattern_match = accepted_pattern and SafeRe.match(accepted_pattern, domain)
- if domain in accepted_domains or accept_any or accepted_pattern_match:
- accounts.append([domain, title, ""])
- else:
- accounts.append([domain, title, "disabled"])
-
- # Render the html
- body = "" + _["Select account you want to use in this site:"] + " "
- # Accounts
- for domain, account, css_class in accounts:
- if domain == active:
- css_class += " active" # Currently selected option
- title = _("%s ({_[currently selected]}) ") % account
- else:
- title = "%s " % account
- body += "%s " % (css_class, domain, title)
- # More available providers
- more_domains = [domain for domain in accepted_domains if domain not in self.user.certs] # Domains not displayed yet
- if more_domains:
- # body+= "Accepted authorization providers by the site: "
- body += ""
-
- script = """
- $(".notification .select.cert").on("click", function() {
- $(".notification .select").removeClass('active')
- zeroframe.response(%s, this.title)
- return false
- })
- """ % self.next_message_id
-
- self.cmd("notification", ["ask", body], lambda domain: self.actionCertSet(to, domain))
- self.cmd("injectScript", script)
-
- # - Admin actions -
-
- @flag.admin
- def actionPermissionAdd(self, to, permission):
- if permission not in self.site.settings["permissions"]:
- self.site.settings["permissions"].append(permission)
- self.site.saveSettings()
- self.site.updateWebsocket(permission_added=permission)
- self.response(to, "ok")
-
- @flag.admin
- def actionPermissionRemove(self, to, permission):
- self.site.settings["permissions"].remove(permission)
- self.site.saveSettings()
- self.site.updateWebsocket(permission_removed=permission)
- self.response(to, "ok")
-
- @flag.admin
- def actionPermissionDetails(self, to, permission):
- if permission == "ADMIN":
- self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + " ")
- elif permission == "NOSANDBOX":
- self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + " ")
- elif permission == "PushNotification":
- self.response(to, _["Send notifications"])
- else:
- self.response(to, "")
-
- # Set certificate that used for authenticate user for site
- @flag.admin
- def actionCertSet(self, to, domain):
- self.user.setCert(self.site.address, domain)
- self.site.updateWebsocket(cert_changed=domain)
- self.response(to, "ok")
-
- # List user's certificates
- @flag.admin
- def actionCertList(self, to):
- back = []
- auth_address = self.user.getAuthAddress(self.site.address)
- for domain, cert in list(self.user.certs.items()):
- back.append({
- "auth_address": cert["auth_address"],
- "auth_type": cert["auth_type"],
- "auth_user_name": cert["auth_user_name"],
- "domain": domain,
- "selected": cert["auth_address"] == auth_address
- })
- return back
-
- # List all site info
- @flag.admin
- def actionSiteList(self, to, connecting_sites=False):
- ret = []
- for site in list(self.server.sites.values()):
- if not site.content_manager.contents.get("content.json") and not connecting_sites:
- continue # Incomplete site
- ret.append(self.formatSiteInfo(site, create_user=False)) # Don't generate the auth_address on listing
- self.response(to, ret)
-
- # Join to an event channel on all sites
- @flag.admin
- def actionChannelJoinAllsite(self, to, channel):
- if channel not in self.channels: # Add channel to channels
- self.channels.append(channel)
-
- for site in list(self.server.sites.values()): # Add websocket to every channel
- if self not in site.websockets:
- site.websockets.append(self)
-
- self.response(to, "ok")
-
- # Update site content.json
- def actionSiteUpdate(self, to, address, check_files=False, since=None, announce=False):
- def updateThread():
- site.update(announce=announce, check_files=check_files, since=since)
- self.response(to, "Updated")
-
- site = self.server.sites.get(address)
- if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]):
- if not site.settings["serving"]:
- site.settings["serving"] = True
- site.saveSettings()
-
- gevent.spawn(updateThread)
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
- # Pause site serving
- @flag.admin
- def actionSitePause(self, to, address):
- site = self.server.sites.get(address)
- if site:
- site.settings["serving"] = False
- site.saveSettings()
- site.updateWebsocket()
- site.worker_manager.stopWorkers()
- self.response(to, "Paused")
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
- # Resume site serving
- @flag.admin
- def actionSiteResume(self, to, address):
- site = self.server.sites.get(address)
- if site:
- site.settings["serving"] = True
- site.saveSettings()
- gevent.spawn(site.update, announce=True)
- time.sleep(0.001) # Wait for the update thread to start
- site.updateWebsocket()
- self.response(to, "Resumed")
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
- @flag.admin
- @flag.no_multiuser
- def actionSiteDelete(self, to, address):
- site = self.server.sites.get(address)
- if site:
- site.delete()
- self.user.deleteSiteData(address)
- self.response(to, "Deleted")
- import gc
- gc.collect(2)
- else:
- self.response(to, {"error": "Unknown site: %s" % address})
-
- def cbSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True):
- self.cmd("notification", ["info", _["Cloning site..."]])
- site = self.server.sites.get(address)
- response = {}
- if target_address:
- target_site = self.server.sites.get(target_address)
- privatekey = self.user.getSiteData(target_site.address).get("privatekey")
- site.clone(target_address, privatekey, root_inner_path=root_inner_path)
- self.cmd("notification", ["done", _["Site source code upgraded!"]])
- site.publish()
- response = {"address": target_address}
- else:
- # Generate a new site from user's bip32 seed
- new_address, new_address_index, new_site_data = self.user.getNewSiteData()
- new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index, root_inner_path=root_inner_path)
- new_site.settings["own"] = True
- new_site.saveSettings()
- self.cmd("notification", ["done", _["Site cloned"]])
- if redirect:
- self.cmd("redirect", "/%s" % new_address)
- gevent.spawn(new_site.announce)
- response = {"address": new_address}
- self.response(to, response)
- return "ok"
-
- @flag.no_multiuser
- def actionSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True):
- if not SiteManager.site_manager.isAddress(address):
- self.response(to, {"error": "Not a site: %s" % address})
- return
-
- if not self.server.sites.get(address):
- # Don't expose site existence
- return
-
- site = self.server.sites.get(address)
- if site.bad_files:
- for bad_inner_path in list(site.bad_files.keys()):
- is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path)
- if not is_user_file and bad_inner_path != "content.json":
- self.cmd("notification", ["error", _["Clone error: Site still in sync"]])
- return {"error": "Site still in sync"}
-
- if "ADMIN" in self.getPermissions(to):
- self.cbSiteClone(to, address, root_inner_path, target_address, redirect)
- else:
- self.cmd(
- "confirm",
- [_["Clone site %s ?"] % address, _["Clone"]],
- lambda res: self.cbSiteClone(to, address, root_inner_path, target_address, redirect)
- )
-
- @flag.admin
- @flag.no_multiuser
- def actionSiteSetLimit(self, to, size_limit):
- self.site.settings["size_limit"] = int(size_limit)
- self.site.saveSettings()
- self.response(to, "ok")
- self.site.updateWebsocket()
- self.site.download(blind_includes=True)
-
- @flag.admin
- def actionSiteAdd(self, to, address):
- site_manager = SiteManager.site_manager
- if address in site_manager.sites:
- return {"error": "Site already added"}
- else:
- if site_manager.need(address):
- return "ok"
- else:
- return {"error": "Invalid address"}
-
- @flag.async_run
- def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"):
- content = self.site.content_manager.contents.get(content_inner_path)
- if not content:
- return {"error": "content file not avaliable"}
-
- min_mtime = content.get("modified", 0)
- site_path = self.site.storage.directory
- modified_files = []
-
- # Load cache if not signed since last modified check
- if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check", 0):
- min_mtime = self.site.settings["cache"].get("time_modified_files_check")
- modified_files = self.site.settings["cache"].get("modified_files", [])
-
- inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys())
-
- if len(inner_paths) > 100:
- return {"error": "Too many files in content.json"}
-
- for relative_inner_path in inner_paths:
- inner_path = helper.getDirname(content_inner_path) + relative_inner_path
- try:
- is_mtime_newer = os.path.getmtime(self.site.storage.getPath(inner_path)) > min_mtime + 1
- if is_mtime_newer:
- if inner_path.endswith("content.json"):
- is_modified = self.site.content_manager.isModified(inner_path)
- else:
- previous_size = content["files"][inner_path]["size"]
- is_same_size = self.site.storage.getSize(inner_path) == previous_size
- ext = inner_path.rsplit(".", 1)[-1]
- is_text_file = ext in ["json", "txt", "html", "js", "css"]
- if is_same_size:
- if is_text_file:
- is_modified = self.site.content_manager.isModified(inner_path) # Check sha512 hash
- else:
- is_modified = False
- else:
- is_modified = True
-
- # The file was changed back to its original state but is still in the cached list, so remove it
- if not is_modified and inner_path in modified_files:
- modified_files.remove(inner_path)
- else:
- is_modified = False
- except Exception as err:
- if not self.site.storage.isFile(inner_path): # File deleted
- is_modified = True
- else:
- raise # Bare raise keeps the original traceback
- if is_modified and inner_path not in modified_files:
- modified_files.append(inner_path)
-
- self.site.settings["cache"]["time_modified_files_check"] = time.time()
- self.site.settings["cache"]["modified_files"] = modified_files
- return {"modified_files": modified_files}
-
- @flag.admin
- def actionSiteSetSettingsValue(self, to, key, value):
- if key not in ["modified_files_notification"]:
- return {"error": "Can't change this key"}
-
- self.site.settings[key] = value
-
- return "ok"
-
- def actionUserGetSettings(self, to):
- settings = self.user.sites.get(self.site.address, {}).get("settings", {})
- self.response(to, settings)
-
- def actionUserSetSettings(self, to, settings):
- self.user.setSiteSettings(self.site.address, settings)
- self.response(to, "ok")
-
- def actionUserGetGlobalSettings(self, to):
- settings = self.user.settings
- self.response(to, settings)
-
- @flag.admin
- def actionUserSetGlobalSettings(self, to, settings):
- self.user.settings = settings
- self.user.save()
- self.response(to, "ok")
-
- @flag.admin
- @flag.no_multiuser
- def actionServerErrors(self, to):
- return config.error_logger.lines
-
- @flag.admin
- @flag.no_multiuser
- def actionServerUpdate(self, to):
- def cbServerUpdate(res):
- self.response(to, res)
- if not res:
- return False
- for websocket in self.server.websockets:
- websocket.cmd(
- "notification",
- ["info", _["Updating ZeroNet client, will be back in a few minutes..."], 20000]
- )
- websocket.cmd("updating")
-
- import main
- main.update_after_shutdown = True
- main.restart_after_shutdown = True
- SiteManager.site_manager.save()
- main.file_server.stop()
- main.ui_server.stop()
-
- self.cmd(
- "confirm",
- [_["Update ZeroNet client to latest version?"], _["Update"]],
- cbServerUpdate
- )
-
- @flag.admin
- @flag.async_run
- @flag.no_multiuser
- def actionServerPortcheck(self, to):
- import main
- file_server = main.file_server
- file_server.portCheck()
- self.response(to, file_server.port_opened)
-
- @flag.admin
- @flag.no_multiuser
- def actionServerShutdown(self, to, restart=False):
- import main
- def cbServerShutdown(res):
- self.response(to, res)
- if not res:
- return False
- if restart:
- main.restart_after_shutdown = True
- main.file_server.stop()
- main.ui_server.stop()
-
- if restart:
- message = [_["Restart ZeroNet client ?"], _["Restart"]]
- else:
- message = [_["Shut down ZeroNet client ?"], _["Shut down"]]
- self.cmd("confirm", message, cbServerShutdown)
-
- @flag.admin
- @flag.no_multiuser
- def actionServerShowdirectory(self, to, directory="backup", inner_path=""):
- if self.request.env["REMOTE_ADDR"] != "127.0.0.1":
- return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"})
-
- import webbrowser
- if directory == "backup":
- path = os.path.abspath(config.data_dir)
- elif directory == "log":
- path = os.path.abspath(config.log_dir)
- elif directory == "site":
- path = os.path.abspath(self.site.storage.getPath(helper.getDirname(inner_path)))
-
- if os.path.isdir(path):
- self.log.debug("Opening: %s" % path)
- webbrowser.open('file://' + path)
- return self.response(to, "ok")
- else:
- return self.response(to, {"error": "Not a directory"})
-
- @flag.admin
- @flag.no_multiuser
- def actionConfigSet(self, to, key, value):
- import main
-
- self.log.debug("Changing config %s value to %r" % (key, value))
- if key not in config.keys_api_change_allowed:
- self.response(to, {"error": "Forbidden: You cannot set this config key"})
- return
-
- if key == "open_browser":
- if value not in ["default_browser", "False"]:
- self.response(to, {"error": "Forbidden: Invalid value"})
- return
-
- # Remove empty lines from lists
- if type(value) is list:
- value = [line for line in value if line]
-
- config.saveValue(key, value)
-
- if key not in config.keys_restart_need:
- if value is None: # Default value
- setattr(config, key, config.parser.get_default(key))
- setattr(config.arguments, key, config.parser.get_default(key))
- else:
- setattr(config, key, value)
- setattr(config.arguments, key, value)
- else:
- config.need_restart = True
- config.pending_changes[key] = value
-
- if key == "language":
- import Translate
- for translate in Translate.translates:
- translate.setLanguage(value)
- message = _["You have successfully changed the web interface's language!"] + " "
- message += _["Due to the browser's caching, the full transformation could take some minute."]
- self.cmd("notification", ["done", message, 10000])
-
- if key == "tor_use_bridges":
- if value is None:
- value = False
- else:
- value = True
- tor_manager = main.file_server.tor_manager
- tor_manager.request("SETCONF UseBridges=%i" % value)
-
- if key == "trackers_file":
- config.loadTrackersFile()
-
- if key == "log_level":
- logging.getLogger('').setLevel(logging.getLevelName(config.log_level))
-
- if key == "ip_external":
- gevent.spawn(main.file_server.portCheck)
-
- if key == "offline":
- if value:
- main.file_server.closeConnections()
- else:
- gevent.spawn(main.file_server.checkSites, check_files=False, force_port_check=True)
-
- self.response(to, "ok")
diff --git a/src/Ui/__init__.py b/src/Ui/__init__.py
deleted file mode 100644
index dcb8896d..00000000
--- a/src/Ui/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .UiServer import UiServer
-from .UiRequest import UiRequest
-from .UiWebsocket import UiWebsocket
\ No newline at end of file
diff --git a/src/Ui/media/Fixbutton.coffee b/src/Ui/media/Fixbutton.coffee
deleted file mode 100644
index 954d2b56..00000000
--- a/src/Ui/media/Fixbutton.coffee
+++ /dev/null
@@ -1,32 +0,0 @@
-class Fixbutton
- constructor: ->
- @dragging = false
- $(".fixbutton-bg").on "mouseover", ->
- $(".fixbutton-bg").stop().animate({"scale": 0.7}, 800, "easeOutElastic")
- $(".fixbutton-burger").stop().animate({"opacity": 1.5, "left": 0}, 800, "easeOutElastic")
- $(".fixbutton-text").stop().animate({"opacity": 0, "left": 20}, 300, "easeOutCubic")
-
- $(".fixbutton-bg").on "mouseout", ->
- if $(".fixbutton").hasClass("dragging")
- return true
- $(".fixbutton-bg").stop().animate({"scale": 0.6}, 300, "easeOutCubic")
- $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic")
- $(".fixbutton-text").stop().animate({"opacity": 0.9, "left": 0}, 300, "easeOutBack")
-
-
- ###$(".fixbutton-bg").on "click", ->
- return false
- ###
-
- $(".fixbutton-bg").on "mousedown", ->
- # $(".fixbutton-burger").stop().animate({"scale": 0.7, "left": 0}, 300, "easeOutCubic")
- #$("#inner-iframe").toggleClass("back")
- #$(".wrapper-iframe").stop().animate({"scale": 0.9}, 600, "easeOutCubic")
- #$("body").addClass("back")
-
- $(".fixbutton-bg").on "mouseup", ->
- # $(".fixbutton-burger").stop().animate({"scale": 1, "left": 0}, 600, "easeOutElastic")
-
-
-
-window.Fixbutton = Fixbutton
diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee
deleted file mode 100644
index 3a490364..00000000
--- a/src/Ui/media/Infopanel.coffee
+++ /dev/null
@@ -1,57 +0,0 @@
-class Infopanel
- constructor: (@elem) ->
- @visible = false
-
- show: (closed=false) =>
- @elem.parent().addClass("visible")
- if closed
- @close()
- else
- @open()
-
- unfold: =>
- @elem.toggleClass("unfolded")
- return false
-
- updateEvents: =>
- @elem.off("click")
- @elem.find(".close").off("click")
- @elem.find(".line").off("click")
-
- @elem.find(".line").on("click", @unfold)
-
- if @elem.hasClass("closed")
- @elem.on "click", =>
- @onOpened()
- @open()
- else
- @elem.find(".close").on "click", =>
- @onClosed()
- @close()
-
- hide: =>
- @elem.parent().removeClass("visible")
-
- close: =>
- @elem.addClass("closed")
- @updateEvents()
- return false
-
- open: =>
- @elem.removeClass("closed")
- @updateEvents()
- return false
-
- setTitle: (line1, line2) =>
- @elem.find(".line-1").text(line1)
- @elem.find(".line-2").text(line2)
-
- setClosedNum: (num) =>
- @elem.find(".closed-num").text(num)
-
- setAction: (title, func) =>
- @elem.find(".button").text(title).off("click").on("click", func)
-
-
-
-window.Infopanel = Infopanel
diff --git a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee
deleted file mode 100644
index 8e35ce66..00000000
--- a/src/Ui/media/Loading.coffee
+++ /dev/null
@@ -1,91 +0,0 @@
-class Loading
- constructor: (@wrapper) ->
- if window.show_loadingscreen then @showScreen()
- @timer_hide = null
- @timer_set = null
-
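- # Scale the progress bar to the given ratio (updates rate-limited to 500ms)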
- setProgress: (percent) ->
- if @timer_hide
- clearInterval @timer_hide
- @timer_set = RateLimit 500, ->
- $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block")
-
- hideProgress: ->
- @log "hideProgress"
- if @timer_set
- clearInterval @timer_set
- @timer_hide = setTimeout ( =>
- $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000)
- ), 300
-
-
- showScreen: ->
- $(".loadingscreen").css("display", "block").addClassLater("ready")
- @screen_visible = true
- @printLine " Connecting..."
-
-
- showTooLarge: (site_info) ->
- @log "Displaying large site confirmation"
- if $(".console .button-setlimit").length == 0 # Not displaying it yet
- line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning")
- button = $("" + "Open site and set size limit to #{site_info.next_size_limit}MB" + " ")
- button.on "click", =>
- button.addClass("loading")
- return @wrapper.setSizeLimit(site_info.next_size_limit)
- line.after(button)
- setTimeout (=>
- @printLine('Ready.')
- ), 100
-
- showTrackerTorBridge: (server_info) ->
- if $(".console .button-settrackerbridge").length == 0 and not server_info.tor_use_meek_bridges
- line = @printLine("Tracker connection error detected.", "error")
- button = $("" + "Use Tor meek bridges for tracker connections" + " ")
- button.on "click", =>
- button.addClass("loading")
- @wrapper.ws.cmd "configSet", ["tor_use_bridges", ""]
- @wrapper.ws.cmd "configSet", ["trackers_proxy", "tor"]
- @wrapper.ws.cmd "siteUpdate", {address: @wrapper.site_info.address, announce: true}
- @wrapper.reloadIframe()
- return false
- line.after(button)
- if not server_info.tor_has_meek_bridges
- button.addClass("disabled")
- @printLine("No meek bridge support in your client, please download the latest bundle .", "warning")
-
- # We don't need the loading screen anymore
- hideScreen: ->
- @log "hideScreen"
- if not $(".loadingscreen").hasClass("done") # Only if its not animating already
- if @screen_visible # Hide with animate
- $(".loadingscreen").addClass("done").removeLater(2000)
- else # Not visible, just remove
- $(".loadingscreen").remove()
- @screen_visible = false
-
-
- # Append text to last line of loadingscreen
- print: (text, type="normal") ->
- if not @screen_visible then return false
- $(".loadingscreen .console .cursor").remove() # Remove previous cursor
- last_line = $(".loadingscreen .console .console-line:last-child")
- if type == "error" then text = "#{text} "
- last_line.html(last_line.html()+text)
-
-
- # Add line to loading screen
- printLine: (text, type="normal") ->
- if not @screen_visible then return false
- $(".loadingscreen .console .cursor").remove() # Remove previous cursor
- if type == "error" then text = "#{text} " else text = text+" "
-
- line = $(" #{text}
").appendTo(".loadingscreen .console")
- if type == "warning" then line.addClass("console-warning")
- return line
-
- log: (args...) ->
- console.log "[Loading]", args...
-
-
-window.Loading = Loading
diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee
deleted file mode 100644
index 8898b645..00000000
--- a/src/Ui/media/Notifications.coffee
+++ /dev/null
@@ -1,89 +0,0 @@
-class Notifications
- constructor: (@elem) ->
- @
-
- test: ->
- setTimeout (=>
- @add("connection", "error", "Connection lost to UiServer on localhost !")
- @add("message-Anyone", "info", "New from Anyone .")
- ), 1000
- setTimeout (=>
- @add("connection", "done", "UiServer connection recovered.", 5000)
- ), 3000
-
-
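- # Show a notification, replacing any earlier notification with the same id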
- add: (id, type, body, timeout=0) ->
- id = id.replace /[^A-Za-z0-9-]/g, ""
- # Close notifications with same id
- for elem in $(".notification-#{id}")
- @close $(elem)
-
- # Create element
- elem = $(".notification.template", @elem).clone().removeClass("template")
- elem.addClass("notification-#{type}").addClass("notification-#{id}")
- if type == "progress"
- elem.addClass("notification-done")
-
- # Update text
- if type == "error"
- $(".notification-icon", elem).html("!")
- else if type == "done"
- $(".notification-icon", elem).html("
")
- else if type == "progress"
- $(".notification-icon", elem).html("
")
- else if type == "ask"
- $(".notification-icon", elem).html("?")
- else
- $(".notification-icon", elem).html("i")
-
- if typeof(body) == "string"
- $(".body", elem).html(""+body+"
")
- else
- $(".body", elem).html("").append(body)
-
- elem.appendTo(@elem)
-
- # Timeout
- if timeout
- $(".close", elem).remove() # No need of close button
- setTimeout (=>
- @close elem
- ), timeout
-
- # Animate
- width = Math.min(elem.outerWidth() + 50, 580)
- if not timeout then width += 20 # Add space for close button
- if elem.outerHeight() > 55 then elem.addClass("long")
- elem.css({"width": "50px", "transform": "scale(0.01)"})
- elem.animate({"scale": 1}, 800, "easeOutElastic")
- elem.animate({"width": width}, 700, "easeInOutCubic")
- $(".body", elem).css("width": (width - 50))
- $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000)
-
- # Close button or Confirm button
- $(".close, .button", elem).on "click", =>
- @close elem
- return false
-
- # Select list
- $(".select", elem).on "click", =>
- @close elem
-
- # Input enter
- $("input", elem).on "keyup", (e) =>
- if e.keyCode == 13
- @close elem
-
- return elem
-
-
- close: (elem) ->
- elem.stop().animate {"width": 0, "opacity": 0}, 700, "easeInOutCubic"
- elem.slideUp 300, (-> elem.remove())
-
-
- log: (args...) ->
- console.log "[Notifications]", args...
-
-
-window.Notifications = Notifications
diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee
deleted file mode 100644
index 1b98855e..00000000
--- a/src/Ui/media/Wrapper.coffee
+++ /dev/null
@@ -1,714 +0,0 @@
-class Wrapper
- constructor: (ws_url) ->
- @log "Created!"
-
- @loading = new Loading(@)
- @notifications = new Notifications($(".notifications"))
- @infopanel = new Infopanel($(".infopanel"))
- @infopanel.onClosed = =>
- @ws.cmd("siteSetSettingsValue", ["modified_files_notification", false])
- @infopanel.onOpened = =>
- @ws.cmd("siteSetSettingsValue", ["modified_files_notification", true])
- @fixbutton = new Fixbutton()
-
- window.addEventListener("message", @onMessageInner, false)
- @inner = document.getElementById("inner-iframe").contentWindow
- @ws = new ZeroWebsocket(ws_url)
- @ws.next_message_id = 1000000 # Avoid messageid collision :)
- @ws.onOpen = @onOpenWebsocket
- @ws.onClose = @onCloseWebsocket
- @ws.onMessage = @onMessageWebsocket
- @ws.connect()
- @ws_error = null # Ws error message
-
- @next_cmd_message_id = -1
-
- @site_info = null # Hold latest site info
- @server_info = null # Hold latest server info
- @event_site_info = $.Deferred() # Event when site_info received
- @inner_loaded = false # If iframe loaded or not
- @inner_ready = false # Inner frame ready to receive messages
- @wrapperWsInited = false # Wrapper notified on websocket open
- @site_error = null # Latest failed file download
- @address = null
- @opener_tested = false
- @announcer_line = null
- @web_notifications = {}
- @is_title_changed = false
-
- @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors
-
- window.onload = @onPageLoad # On iframe loaded
- window.onhashchange = (e) => # On hash change
- @log "Hashchange", window.location.hash
- if window.location.hash
- src = $("#inner-iframe").attr("src").replace(/#.*/, "")+window.location.hash
- $("#inner-iframe").attr("src", src)
-
- window.onpopstate = (e) =>
- @sendInner {"cmd": "wrapperPopState", "params": {"href": document.location.href, "state": e.state}}
-
- $("#inner-iframe").focus()
-
-
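- # Ensure the event is a trusted user action fired on the expected element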
- verifyEvent: (allowed_target, e) =>
- if not e.originalEvent.isTrusted
- throw "Event not trusted"
-
- if e.originalEvent.constructor not in @allowed_event_constructors
- throw "Invalid event constructor: #{e.constructor} not in #{JSON.stringify(@allowed_event_constructors)}"
-
- if e.originalEvent.currentTarget != allowed_target[0]
- throw "Invalid event target: #{e.originalEvent.currentTarget} != #{allowed_target[0]}"
-
- # Incoming message from UiServer websocket
- onMessageWebsocket: (e) =>
- message = JSON.parse(e.data)
- @handleMessageWebsocket(message)
-
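- # Dispatch an incoming UiServer command to the matching UI handler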
- handleMessageWebsocket: (message) =>
- cmd = message.cmd
- if cmd == "response"
- if @ws.waiting_cb[message.to]? # We are waiting for response
- @ws.waiting_cb[message.to](message.result)
- else
- @sendInner message # Pass message to inner frame
- else if cmd == "notification" # Display notification
- type = message.params[0]
- id = "notification-ws-#{message.id}"
- if "-" in message.params[0] # - in first param: message id defined
- [id, type] = message.params[0].split("-")
- @notifications.add(id, type, message.params[1], message.params[2])
- else if cmd == "progress" # Display notification
- @actionProgress(message)
- else if cmd == "prompt" # Prompt input
- @displayPrompt message.params[0], message.params[1], message.params[2], message.params[3], (res) =>
- @ws.response message.id, res
- else if cmd == "confirm" # Confirm action
- @displayConfirm message.params[0], message.params[1], (res) =>
- @ws.response message.id, res
- else if cmd == "setSiteInfo"
- @sendInner message # Pass to inner frame
- if message.params.address == @address # Current page
- @setSiteInfo message.params
- @updateProgress message.params
- else if cmd == "setAnnouncerInfo"
- @sendInner message # Pass to inner frame
- if message.params.address == @address # Current page
- @setAnnouncerInfo message.params
- @updateProgress message.params
- else if cmd == "error"
- @notifications.add("notification-#{message.id}", "error", message.params, 0)
- else if cmd == "updating" # Close connection
- @log "Updating: Closing websocket"
- @ws.ws.close()
- @ws.onCloseWebsocket(null, 4000)
- else if cmd == "redirect"
- window.top.location = message.params
- else if cmd == "injectHtml"
- $("body").append(message.params)
- else if cmd == "injectScript"
- script_tag = $("