Delete src directory

Author: wupg98 (committed by GitHub)
Date: 2023-09-02 07:46:50 +02:00
Parent: 829e825fdd
Commit: c661f0b062
228 changed files with 0 additions and 32719 deletions

src/Config.py

@@ -1,673 +0,0 @@
import argparse
import sys
import os
import locale
import re
import configparser
import logging
import logging.handlers
import stat
import time
class Config(object):
def __init__(self, argv):
self.version = "0.7.2"
self.rev = 4555
self.argv = argv
self.action = None
self.test_parser = None
self.pending_changes = {}
self.need_restart = False
self.keys_api_change_allowed = set([
"tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers",
"trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline",
"threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
])
self.keys_restart_need = set([
"tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
])
self.start_dir = self.getStartDir()
self.config_file = self.start_dir + "/zeronet.conf"
self.data_dir = self.start_dir + "/data"
self.log_dir = self.start_dir + "/log"
self.openssl_lib_file = None
self.openssl_bin_file = None
self.trackers_file = False
self.createParser()
self.createArguments()
def createParser(self):
# Create parser
self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
self.parser.register('type', 'bool', self.strToBool)
self.subparsers = self.parser.add_subparsers(title="Action to perform", dest="action")
def __str__(self):
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
# Convert string to bool
def strToBool(self, v):
return v.lower() in ("yes", "true", "t", "1")
def getStartDir(self):
this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd")  # Normalize slashes; strip the trailing "c"/"d" so .pyc/.pyd resolve to .py
if "--start_dir" in self.argv:
start_dir = self.argv[self.argv.index("--start_dir") + 1]
elif this_file.endswith("/Contents/Resources/core/src/Config.py"):
# Running as ZeroNet.app
if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
# Running from a non-writeable directory, put data into Application Support
start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet")
else:
# Running from a writeable directory, put data next to the .app
start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file)
elif this_file.endswith("/core/src/Config.py"):
# Running as an exe or from the Application Support directory, keep var files outside the core dir
start_dir = this_file.replace("/core/src/Config.py", "")
elif this_file.endswith("usr/share/zeronet/src/Config.py"):
# Running from non-writeable location, e.g., AppImage
start_dir = os.path.expanduser("~/ZeroNet")
else:
start_dir = "."
return start_dir
# Create command line arguments
def createArguments(self):
trackers = [
"zero://boot3rdez4rzn36x.onion:15441",
"zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY
"udp://tracker.coppersurfer.tk:6969", # DE
"udp://104.238.198.186:8000", # US/LA
"udp://retracker.akado-ural.ru:80", # RU
"http://h4.trakx.nibba.trade:80/announce", # US/VA
"http://open.acgnxtracker.com:80/announce", # DE
"http://tracker.bt4g.com:2095/announce", # Cloudflare
"zero://2602:ffc5::c5b2:5360:26312" # US/ATL
]
# Platform specific
if sys.platform.startswith("win"):
coffeescript = "type %s | tools\\coffee\\coffee.cmd"
else:
coffeescript = None
try:
language, enc = locale.getdefaultlocale()
language = language.lower().replace("_", "-")
if language not in ["pt-br", "zh-tw"]:
language = language.split("-")[0]
except Exception:
language = "en"
use_openssl = True
if repr(1483108852.565) != "1483108852.565": # Fix for weird Android issue
fix_float_decimals = True
else:
fix_float_decimals = False
config_file = self.start_dir + "/zeronet.conf"
data_dir = self.start_dir + "/data"
log_dir = self.start_dir + "/log"
ip_local = ["127.0.0.1", "::1"]
# Main
action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
# SiteCreate
action = self.subparsers.add_parser("siteCreate", help='Create a new site')
action.register('type', 'bool', self.strToBool)
action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True)
# SiteNeedFile
action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
action.add_argument('address', help='Site address')
action.add_argument('inner_path', help='File inner path')
# SiteDownload
action = self.subparsers.add_parser("siteDownload", help='Download a new site')
action.add_argument('address', help='Site address')
# SiteSign
action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
action.add_argument('address', help='Site to sign')
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
default="content.json", metavar="inner_path")
action.add_argument('--remove_missing_optional', help='Remove optional files that are not present in the directory', action='store_true')
action.add_argument('--publish', help='Publish site after the signing', action='store_true')
# SitePublish
action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
action.add_argument('address', help='Site to publish')
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
default=None, nargs='?')
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
default="content.json", metavar="inner_path")
# SiteVerify
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
action.add_argument('address', help='Site to verify')
# SiteCmd
action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site')
action.add_argument('address', help='Site address')
action.add_argument('cmd', help='API command name')
action.add_argument('parameters', help='Parameters of the command', nargs='?')
# dbRebuild
action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
action.add_argument('address', help='Site to rebuild')
# dbQuery
action = self.subparsers.add_parser("dbQuery", help='Query site sql cache')
action.add_argument('address', help='Site to query')
action.add_argument('query', help='Sql query')
# PeerPing
action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port', nargs='?')
# PeerGetFile
action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port')
action.add_argument('site', help='Site address')
action.add_argument('filename', help='File name to request')
action.add_argument('--benchmark', help='Request file 10x then displays the total time', action='store_true')
# PeerCmd
action = self.subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port')
action.add_argument('cmd', help='Command to execute')
action.add_argument('parameters', help='Parameters to command', nargs='?')
# CryptSign
action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
action.add_argument('message', help='Message to sign')
action.add_argument('privatekey', help='Private key')
# Crypt Verify
action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address')
action.add_argument('message', help='Message to verify')
action.add_argument('sign', help='Signature for message')
action.add_argument('address', help='Signer\'s address')
# Crypt GetPrivatekey
action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed')
action.add_argument('master_seed', help='Source master seed')
action.add_argument('site_address_index', help='Site address index', type=int)
action = self.subparsers.add_parser("getConfig", help='Return json-encoded info')
action = self.subparsers.add_parser("testConnection", help='Testing')
action = self.subparsers.add_parser("testAnnounce", help='Testing')
self.test_parser = self.subparsers.add_parser("test", help='Run a test')
self.test_parser.add_argument('test_name', help='Test name', nargs="?")
# self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true')
# Config parameters
self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
self.parser.add_argument('--debug', help='Debug mode', action='store_true')
self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true')
self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true')
self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path")
self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"])
self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"])
self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"])
self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int)
self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
self.parser.add_argument('--ui_host', help='Allow access using these hosts', metavar='host', nargs='*')
self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true')
self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
nargs='?', const="default_browser", metavar='browser_name')
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D',
metavar='address')
self.parser.add_argument('--updatesite', help='Source code update site', default='1uPDaT3uSyWAPdCv1WkMb5hBQjWSNNACf',
metavar='address')
self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit')
self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port')
self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port')
self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"])
self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, metavar='ip', nargs='*')
self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
self.parser.add_argument('--offline', help='Disable network communication', action='store_true')
self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
self.parser.add_argument('--trackers', help='Bootstrapping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*')
self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
self.parser.add_argument('--use_libsecp256k1', help='Use libsecp256k1 library for speedup', type='bool', choices=[True, False], default=True)
self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup', type='bool', choices=[True, False], default=True)
self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path")
self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path")
self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true')
self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
type='bool', choices=[True, False], default=True)
self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup',
default=2048, type=int, metavar='limit')
self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)',
type='bool', choices=[True, False], default=False)
self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
type='bool', choices=[True, False], default=False)
self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
type='bool', choices=[True, False], default=False)
self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
type='bool', choices=[True, False], default=fix_float_decimals)
self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int)
self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int)
self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int)
self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int)
self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual")
self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
metavar='executable_path')
self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true')
self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10)
self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='port', type=int, default=15441)
self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true')
return self.parser
def loadTrackersFile(self):
if not self.trackers_file:
return None
self.trackers = self.arguments.trackers[:]
for trackers_file in self.trackers_file:
try:
if trackers_file.startswith("/"): # Absolute
trackers_file_path = trackers_file
elif trackers_file.startswith("{data_dir}"): # Relative to data_dir
trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir)
else: # Relative to zeronet.py
trackers_file_path = self.start_dir + "/" + trackers_file
for line in open(trackers_file_path):
tracker = line.strip()
if "://" in tracker and tracker not in self.trackers:
self.trackers.append(tracker)
except Exception as err:
print("Error loading trackers file: %s" % err)
# Find arguments specified for current action
def getActionArguments(self):
back = {}
arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # Skip the first, auto-added action (help)
for argument in arguments:
back[argument.dest] = getattr(self, argument.dest)
return back
# Try to find action from argv
def getAction(self, argv):
actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions
found_action = False
for action in actions: # See if any in argv
if action in argv:
found_action = action
break
return found_action
# Move plugin parameters to end of argument list
def moveUnknownToEnd(self, argv, default_action):
valid_actions = sum([action.option_strings for action in self.parser._actions], [])
valid_parameters = []
plugin_parameters = []
plugin = False
for arg in argv:
if arg.startswith("--"):
if arg not in valid_actions:
plugin = True
else:
plugin = False
elif arg == default_action:
plugin = False
if plugin:
plugin_parameters.append(arg)
else:
valid_parameters.append(arg)
return valid_parameters + plugin_parameters
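# A worked example of the reordering above (--plugin_opt is hypothetical): options
# the core parser has not registered are shifted behind the known ones, so argparse
# does not abort on them:
#   argv in:  ["zeronet.py", "--plugin_opt", "x", "--debug", "main"]
#   argv out: ["zeronet.py", "--debug", "main", "--plugin_opt", "x"]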
def getParser(self, argv):
action = self.getAction(argv)
if not action:
return self.parser
else:
return self.subparsers.choices[action]
# Parse arguments from config file and command line
def parse(self, silent=False, parse_config=True):
argv = self.argv[:] # Copy command line arguments
current_parser = self.getParser(argv)
if silent: # Don't display messages or quit on unknown parameter
original_print_message = current_parser._print_message
original_exit = current_parser.exit
def silencer(parser, function_name):
parser.exited = True
return None
current_parser.exited = False
current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message")
current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit")
self.parseCommandline(argv, silent) # Parse argv
self.setAttributes()
if parse_config:
argv = self.parseConfig(argv) # Add arguments from config file
self.parseCommandline(argv, silent) # Parse argv
self.setAttributes()
if not silent:
if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
self.ip_local.append(self.fileserver_ip)
if silent: # Restore original functions
if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
self.action = None
current_parser._print_message = original_print_message
current_parser.exit = original_exit
self.loadTrackersFile()
# Parse command line arguments
def parseCommandline(self, argv, silent=False):
# Find out if an action is specified on start
action = self.getAction(argv)
if not action:
argv.append("--end")
argv.append("main")
action = "main"
argv = self.moveUnknownToEnd(argv, action)
if silent:
res = self.parser.parse_known_args(argv[1:])
if res:
self.arguments = res[0]
else:
self.arguments = {}
else:
self.arguments = self.parser.parse_args(argv[1:])
# Parse config file
def parseConfig(self, argv):
# Find config file path from parameters
if "--config_file" in argv:
self.config_file = argv[argv.index("--config_file") + 1]
# Load config file
if os.path.isfile(self.config_file):
config = configparser.RawConfigParser(allow_no_value=True, strict=False)
config.read(self.config_file)
for section in config.sections():
for key, val in config.items(section):
if val == "True":
val = None
if section != "global": # If not global prefix key with section
key = section + "_" + key
if key == "open_browser": # Prefer config file value over cli argument
while "--%s" % key in argv:
pos = argv.index("--open_browser")
del argv[pos:pos + 2]
argv_extend = ["--%s" % key]
if val:
for line in val.strip().split("\n"): # Allow multi-line values
argv_extend.append(line)
if "\n" in val:
argv_extend.append("--end")
argv = argv[:1] + argv_extend + argv[1:]
return argv
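# A hedged sketch of the config-to-argv mapping (file contents hypothetical): keys in
# [global] are used as-is, keys in other sections get the section name as a prefix,
# and a bare "True" value turns the option into a switch. Given this zeronet.conf:
#   [global]
#   ui_port = 43111
#   debug = True
#   [tor]
#   controller = 127.0.0.1:9151
# parseConfig() splices the equivalent of
#   --ui_port 43111 --debug --tor_controller 127.0.0.1:9151
# into argv right after the program name.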
# Return command line value of given argument
def getCmdlineValue(self, key):
if key not in self.argv:
return None
argv_index = self.argv.index(key)
if argv_index == len(self.argv) - 1: # It's the last arg, so no value was specified
return None
return self.argv[argv_index + 1]
# Expose arguments as class attributes
def setAttributes(self):
# Set attributes from arguments
if self.arguments:
args = vars(self.arguments)
for key, val in args.items():
if type(val) is list:
val = val[:]
if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"):
if val:
val = val.replace("\\", "/")
setattr(self, key, val)
def loadPlugins(self):
from Plugin import PluginManager
@PluginManager.acceptPlugins
class ConfigPlugin(object):
def __init__(self, config):
self.argv = config.argv
self.parser = config.parser
self.subparsers = config.subparsers
self.test_parser = config.test_parser
self.getCmdlineValue = config.getCmdlineValue
self.createArguments()
def createArguments(self):
pass
ConfigPlugin(self)
def saveValue(self, key, value):
if not os.path.isfile(self.config_file):
content = ""
else:
content = open(self.config_file).read()
lines = content.splitlines()
global_line_i = None
key_line_i = None
i = 0
for line in lines:
if line.strip() == "[global]":
global_line_i = i
if line.startswith(key + " =") or line == key:
key_line_i = i
i += 1
if key_line_i is not None and len(lines) > key_line_i + 1:
while True: # Delete previous multiline values
is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t")
if not is_value_line:
break
del lines[key_line_i + 1]
if value is None: # Delete line
if key_line_i is not None:
del lines[key_line_i]
else: # Add / update
if type(value) is list:
value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value]
else:
value_lines = [str(value).replace("\n", "").replace("\r", "")]
new_line = "%s = %s" % (key, "\n ".join(value_lines))
if key_line_i is not None: # Already in the config, change the line
lines[key_line_i] = new_line
elif global_line_i is None: # No global section yet, append to end of file
lines.append("[global]")
lines.append(new_line)
else: # Has global section, append the line after it
lines.insert(global_line_i + 1, new_line)
open(self.config_file, "w").write("\n".join(lines))
def getServerInfo(self):
from Plugin import PluginManager
import main
info = {
"platform": sys.platform,
"fileserver_ip": self.fileserver_ip,
"fileserver_port": self.fileserver_port,
"ui_ip": self.ui_ip,
"ui_port": self.ui_port,
"version": self.version,
"rev": self.rev,
"language": self.language,
"debug": self.debug,
"plugins": PluginManager.plugin_manager.plugin_names,
"log_dir": os.path.abspath(self.log_dir),
"data_dir": os.path.abspath(self.data_dir),
"src_dir": os.path.dirname(os.path.abspath(__file__))
}
try:
info["ip_external"] = main.file_server.port_opened
info["tor_enabled"] = main.file_server.tor_manager.enabled
info["tor_status"] = main.file_server.tor_manager.status
except Exception:
pass
return info
def initConsoleLogger(self):
if self.action == "main":
format = '[%(asctime)s] %(name)s %(message)s'
else:
format = '%(name)s %(message)s'
if self.console_log_level == "default":
if self.silent:
level = logging.ERROR
elif self.debug:
level = logging.DEBUG
else:
level = logging.INFO
else:
level = logging.getLevelName(self.console_log_level)
console_logger = logging.StreamHandler()
console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S"))
console_logger.setLevel(level)
logging.getLogger('').addHandler(console_logger)
def initFileLogger(self):
if self.action == "main":
log_file_path = "%s/debug.log" % self.log_dir
else:
log_file_path = "%s/cmd.log" % self.log_dir
if self.log_rotate == "off":
file_logger = logging.FileHandler(log_file_path, "w", "utf-8")
else:
when_names = {"weekly": "w", "daily": "d", "hourly": "h"}
file_logger = logging.handlers.TimedRotatingFileHandler(
log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count,
encoding="utf8"
)
if os.path.isfile(log_file_path):
file_logger.doRollover() # Always start with empty log file
file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s'))
file_logger.setLevel(logging.getLevelName(self.log_level))
logging.getLogger('').setLevel(logging.getLevelName(self.log_level))
logging.getLogger('').addHandler(file_logger)
def initLogging(self, console_logging=None, file_logging=None):
if console_logging is None:
console_logging = self.console_log_level != "off"
if file_logging is None:
file_logging = self.log_level != "off"
# Create necessary files and dirs
if not os.path.isdir(self.log_dir):
os.mkdir(self.log_dir)
try:
os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
except Exception as err:
print("Can't change permission of %s: %s" % (self.log_dir, err))
# Make warning hidden from console
logging.WARNING = 15 # Don't display warnings if not in debug mode
logging.addLevelName(15, "WARNING")
logging.getLogger('').name = "-" # Remove root prefix
self.error_logger = ErrorLogHandler()
self.error_logger.setLevel(logging.getLevelName("ERROR"))
logging.getLogger('').addHandler(self.error_logger)
if console_logging:
self.initConsoleLogger()
if file_logging:
self.initFileLogger()
class ErrorLogHandler(logging.StreamHandler):
def __init__(self):
self.lines = []
super().__init__()
def emit(self, record):
self.lines.append([time.time(), record.levelname, self.format(record)])
def onNewRecord(self, record):
pass
config = Config(sys.argv)
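A minimal usage sketch of the module-level config singleton, assuming the two-pass startup order zeronet.py follows (pre-parse, load plugins, final parse); the printed value is the default, not guaranteed:

from Config import config
config.parse(silent=True)   # first pass: tolerate options registered later by plugins
config.loadPlugins()        # plugins may add arguments to the parsers
config.parse()              # final parse; zeronet.conf values are merged into argv
config.initLogging()
print(config.ui_port)       # 43110 unless overridden on the CLI or in zeronet.conf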

src/Connection/Connection.py

@@ -1,635 +0,0 @@
import socket
import time
import gevent
try:
from gevent.coros import RLock
except ImportError: # RLock moved to gevent.lock in newer gevent releases
from gevent.lock import RLock
from Config import config
from Debug import Debug
from util import Msgpack
from Crypt import CryptConnection
from util import helper
class Connection(object):
__slots__ = (
"sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "unpacker_bytes", "req_id", "ip_type",
"handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection",
"last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock",
"last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "waiting_requests", "waiting_streams"
)
def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False):
self.sock = sock
self.cert_pin = None
if "#" in ip:
ip, self.cert_pin = ip.split("#")
self.target_onion = target_onion # Requested onion address
self.id = server.last_connection_id
server.last_connection_id += 1
self.protocol = "?"
self.type = "?"
self.ip_type = "?"
self.port = int(port)
self.setIp(ip)
if helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
self.is_private_ip = True
else:
self.is_private_ip = False
self.is_tracker_connection = is_tracker_connection
self.server = server
self.unpacker = None # Stream incoming socket messages here
self.unpacker_bytes = 0 # How many bytes the unpacker received
self.req_id = 0 # Last request id
self.handshake = {} # Handshake info got from peer
self.crypt = None # Connection encryption method
self.sock_wrapped = False # Socket wrapped to encryption
self.connected = False
self.event_connected = gevent.event.AsyncResult() # Resolves when the handshake is received
self.closed = False
# Stats
self.start_time = time.time()
self.handshake_time = 0
self.last_recv_time = 0
self.last_message_time = 0
self.last_send_time = 0
self.last_sent_time = 0
self.incomplete_buff_recv = 0
self.bytes_recv = 0
self.bytes_sent = 0
self.last_ping_delay = None
self.last_req_time = 0
self.last_cmd_sent = None
self.last_cmd_recv = None
self.bad_actions = 0
self.sites = 0
self.cpu_time = 0.0
self.send_lock = RLock()
self.name = None
self.updateName()
self.waiting_requests = {} # Waiting sent requests
self.waiting_streams = {} # Waiting response file streams
def setIp(self, ip):
self.ip = ip
self.ip_type = helper.getIpType(ip)
self.updateName()
def createSocket(self):
if helper.getIpType(self.ip) == "ipv6" and not hasattr(socket, "socket_noproxy"):
# No proxy monkey-patching active: IPv6 targets need an AF_INET6 socket (proxied connections stay IPv4)
return socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def updateName(self):
self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
def __str__(self):
return self.name
def __repr__(self):
return "<%s>" % self.__str__()
def log(self, text):
self.server.log.debug("%s > %s" % (self.name, text))
def getValidSites(self):
return [key for key, val in self.server.tor_manager.site_onions.items() if val == self.target_onion]
def badAction(self, weight=1):
self.bad_actions += weight
if self.bad_actions > 40:
self.close("Too many bad actions")
elif self.bad_actions > 20:
time.sleep(5)
def goodAction(self):
self.bad_actions = 0
# Open connection to peer and wait for handshake
def connect(self):
self.type = "out"
if self.ip_type == "onion":
if not self.server.tor_manager or not self.server.tor_manager.enabled:
raise Exception("Can't connect to onion addresses, no Tor controller present")
self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
raise Exception("Can't connect to local IPs in Tor: always mode")
elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection:
if config.trackers_proxy == "tor":
self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
else:
import socks
self.sock = socks.socksocket()
proxy_ip, proxy_port = config.trackers_proxy.split(":")
self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
else:
self.sock = self.createSocket()
if "TCP_NODELAY" in dir(socket):
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
timeout_before = self.sock.gettimeout()
self.sock.settimeout(30)
if self.ip_type == "ipv6" and not hasattr(self.sock, "proxy"):
sock_address = (self.ip, self.port, 1, 1)
else:
sock_address = (self.ip, self.port)
self.sock.connect(sock_address)
# Implicit SSL
should_encrypt = not self.ip_type == "onion" and self.ip not in self.server.broken_ssl_ips and self.ip not in config.ip_local
if self.cert_pin:
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", cert_pin=self.cert_pin)
self.sock.do_handshake()
self.crypt = "tls-rsa"
self.sock_wrapped = True
elif should_encrypt and "tls-rsa" in CryptConnection.manager.crypt_supported:
try:
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
self.sock.do_handshake()
self.crypt = "tls-rsa"
self.sock_wrapped = True
except Exception as err:
if not config.force_encryption:
self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
self.server.broken_ssl_ips[self.ip] = True
self.sock.close()
self.crypt = None
self.sock = self.createSocket()
self.sock.settimeout(30)
self.sock.connect(sock_address)
# Detect protocol
self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()})
event_connected = self.event_connected
gevent.spawn(self.messageLoop)
connect_res = event_connected.get() # Wait for handshake
self.sock.settimeout(timeout_before)
return connect_res
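# A hedged summary of the outgoing connect flow above:
#   1. Dial TCP (direct socket, Tor manager socket, or SOCKS proxy for trackers)
#   2. If a cert pin is set or the peer looks TLS-capable, wrap in tls-rsa right away
#   3. On a TLS failure without force_encryption, record the ip in broken_ssl_ips
#      and reconnect in plaintext
#   4. Send the "handshake" request and block on event_connected until messageLoop
#      receives the peer's handshake response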
# Handle incoming connection
def handleIncomingConnection(self, sock):
self.log("Incoming connection...")
if "TCP_NODELAY" in dir(socket):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
self.type = "in"
if self.ip not in config.ip_local: # Clearnet: Check implicit SSL
try:
first_byte = sock.recv(1, gevent.socket.MSG_PEEK)
if first_byte == b"\x16":
self.log("Crypt in connection using implicit SSL")
self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
self.sock_wrapped = True
self.crypt = "tls-rsa"
except Exception as err:
self.log("Socket peek error: %s" % Debug.formatException(err))
self.messageLoop()
def getMsgpackUnpacker(self):
if self.handshake and self.handshake.get("use_bin_type"):
return Msgpack.getUnpacker(fallback=True, decode=False)
else: # Backward compatibility for <0.7.0
return Msgpack.getUnpacker(fallback=True, decode=True)
# Message loop for connection
def messageLoop(self):
if not self.sock:
self.log("Socket error: No socket found")
return False
self.protocol = "v2"
self.updateName()
self.connected = True
buff_len = 0
req_len = 0
self.unpacker_bytes = 0
try:
while not self.closed:
buff = self.sock.recv(64 * 1024)
if not buff:
break # Connection closed
buff_len = len(buff)
# Statistics
self.last_recv_time = time.time()
self.incomplete_buff_recv += 1
self.bytes_recv += buff_len
self.server.bytes_recv += buff_len
req_len += buff_len
if not self.unpacker:
self.unpacker = self.getMsgpackUnpacker()
self.unpacker_bytes = 0
self.unpacker.feed(buff)
self.unpacker_bytes += buff_len
while True:
try:
message = next(self.unpacker)
except StopIteration:
break
if type(message) is not dict:
if config.debug_socket:
self.log("Invalid message type: %s, content: %r, buffer: %r" % (type(message), message, buff[0:16]))
raise Exception("Invalid message type: %s" % type(message))
# Stats
self.incomplete_buff_recv = 0
stat_key = message.get("cmd", "unknown")
if stat_key == "response" and "to" in message:
cmd_sent = self.waiting_requests.get(message["to"], {"cmd": "unknown"})["cmd"]
stat_key = "response: %s" % cmd_sent
if stat_key == "update":
stat_key = "update: %s" % message["params"]["site"]
self.server.stat_recv[stat_key]["bytes"] += req_len
self.server.stat_recv[stat_key]["num"] += 1
if "stream_bytes" in message:
self.server.stat_recv[stat_key]["bytes"] += message["stream_bytes"]
req_len = 0
# Handle message
if "stream_bytes" in message:
buff_left = self.handleStream(message, buff)
self.unpacker = self.getMsgpackUnpacker()
self.unpacker.feed(buff_left)
self.unpacker_bytes = len(buff_left)
if config.debug_socket:
self.log("Start new unpacker with buff_left: %r" % buff_left)
else:
self.handleMessage(message)
message = None
except Exception as err:
if not self.closed:
self.log("Socket error: %s" % Debug.formatException(err))
self.server.stat_recv["error: %s" % err]["bytes"] += req_len
self.server.stat_recv["error: %s" % err]["num"] += 1
self.close("MessageLoop ended (closed: %s)" % self.closed) # MessageLoop ended, close connection
def getUnpackerUnprocessedBytesNum(self):
if "tell" in dir(self.unpacker):
bytes_num = self.unpacker_bytes - self.unpacker.tell()
else:
bytes_num = self.unpacker._fb_buf_n - self.unpacker._fb_buf_o
return bytes_num
# Stream socket directly to a file
def handleStream(self, message, buff):
stream_bytes_left = message["stream_bytes"]
file = self.waiting_streams[message["to"]]
unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
if unprocessed_bytes_num: # Found stream bytes in unpacker
unpacker_stream_bytes = min(unprocessed_bytes_num, stream_bytes_left)
buff_stream_start = len(buff) - unprocessed_bytes_num
file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes])
stream_bytes_left -= unpacker_stream_bytes
else:
unpacker_stream_bytes = 0
if config.debug_socket:
self.log(
"Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" %
(message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unprocessed_bytes_num)
)
try:
while 1:
if stream_bytes_left <= 0:
break
stream_buff = self.sock.recv(min(64 * 1024, stream_bytes_left))
if not stream_buff:
break
buff_len = len(stream_buff)
stream_bytes_left -= buff_len
file.write(stream_buff)
# Statistics
self.last_recv_time = time.time()
self.incomplete_buff_recv += 1
self.bytes_recv += buff_len
self.server.bytes_recv += buff_len
except Exception as err:
self.log("Stream read error: %s" % Debug.formatException(err))
if config.debug_socket:
self.log("End stream %s, file pos: %s" % (message["to"], file.tell()))
self.incomplete_buff_recv = 0
self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event
del self.waiting_streams[message["to"]]
del self.waiting_requests[message["to"]]
if unpacker_stream_bytes:
return buff[buff_stream_start + unpacker_stream_bytes:]
else:
return b""
# My handshake info
def getHandshakeInfo(self):
# No TLS for onion connections
if self.ip_type == "onion":
crypt_supported = []
elif self.ip in self.server.broken_ssl_ips:
crypt_supported = []
else:
crypt_supported = CryptConnection.manager.crypt_supported
# No peer id for onion connections
if self.ip_type == "onion" or self.ip in config.ip_local:
peer_id = ""
else:
peer_id = self.server.peer_id
# Setup peer lock from requested onion address
if self.handshake and self.handshake.get("target_ip", "").endswith(".onion") and self.server.tor_manager.start_onions:
self.target_onion = self.handshake.get("target_ip").replace(".onion", "") # My onion address
if self.target_onion not in self.server.tor_manager.site_onions.values():
self.server.log.warning("Unknown target onion address: %s" % self.target_onion)
handshake = {
"version": config.version,
"protocol": "v2",
"use_bin_type": True,
"peer_id": peer_id,
"fileserver_port": self.server.port,
"port_opened": self.server.port_opened.get(self.ip_type, None),
"target_ip": self.ip,
"rev": config.rev,
"crypt_supported": crypt_supported,
"crypt": self.crypt,
"time": int(time.time())
}
if self.target_onion:
handshake["onion"] = self.target_onion
elif self.ip_type == "onion":
handshake["onion"] = self.server.tor_manager.getOnion("global")
if self.is_tracker_connection:
handshake["tracker_connection"] = True
if config.debug_socket:
self.log("My Handshake: %s" % handshake)
return handshake
def setHandshake(self, handshake):
if config.debug_socket:
self.log("Remote Handshake: %s" % handshake)
if handshake.get("peer_id") == self.server.peer_id and not handshake.get("tracker_connection") and not self.is_tracker_connection:
self.close("Same peer id, can't connect to myself")
self.server.peer_blacklist.append((handshake["target_ip"], handshake["fileserver_port"]))
return False
self.handshake = handshake
if handshake.get("port_opened", None) is False and "onion" not in handshake and not self.is_private_ip: # Not connectable
self.port = 0
else:
self.port = int(handshake["fileserver_port"]) # Set peer fileserver port
if handshake.get("use_bin_type") and self.unpacker:
unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
self.log("Changing unpacker to bin type (unprocessed bytes: %s)" % unprocessed_bytes_num)
unprocessed_bytes = self.unpacker.read_bytes(unprocessed_bytes_num)
self.unpacker = self.getMsgpackUnpacker() # Create new unpacker for different msgpack type
self.unpacker_bytes = 0
if unprocessed_bytes:
self.unpacker.feed(unprocessed_bytes)
# Check if we can encrypt the connection
if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips:
if type(handshake["crypt_supported"][0]) is bytes:
handshake["crypt_supported"] = [item.decode() for item in handshake["crypt_supported"]] # Backward compatibility
if self.ip_type == "onion" or self.ip in config.ip_local:
crypt = None
elif handshake.get("crypt"): # Recommended crypt by server
crypt = handshake["crypt"]
else: # Select the best supported on both sides
crypt = CryptConnection.manager.selectCrypt(handshake["crypt_supported"])
if crypt:
self.crypt = crypt
if self.type == "in" and handshake.get("onion") and not self.ip_type == "onion": # Set incoming connection's onion address
if self.server.ips.get(self.ip) == self:
del self.server.ips[self.ip]
self.setIp(handshake["onion"] + ".onion")
self.log("Changing ip to %s" % self.ip)
self.server.ips[self.ip] = self
self.updateName()
self.event_connected.set(True) # Mark handshake as done
self.event_connected = None
self.handshake_time = time.time()
# Handle incoming message
def handleMessage(self, message):
cmd = message["cmd"]
self.last_message_time = time.time()
self.last_cmd_recv = cmd
if cmd == "response": # New style response
if message["to"] in self.waiting_requests:
if self.last_send_time and len(self.waiting_requests) == 1:
ping = time.time() - self.last_send_time
self.last_ping_delay = ping
self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event
del self.waiting_requests[message["to"]]
elif message["to"] == 0: # Other peers handshake
ping = time.time() - self.start_time
if config.debug_socket:
self.log("Handshake response: %s, ping: %s" % (message, ping))
self.last_ping_delay = ping
# Server switched to crypt, let's do it too if not encrypted already
if message.get("crypt") and not self.sock_wrapped:
self.crypt = message["crypt"]
server = (self.type == "in")
self.log("Crypt out connection using: %s (server side: %s, ping: %.3fs)..." % (self.crypt, server, ping))
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
self.sock.do_handshake()
self.sock_wrapped = True
if not self.sock_wrapped and self.cert_pin:
self.close("Crypt connection error: Socket not encrypted, but certificate pin present")
return
self.setHandshake(message)
else:
self.log("Unknown response: %s" % message)
elif cmd:
self.server.num_recv += 1
if cmd == "handshake":
self.handleHandshake(message)
else:
self.server.handleRequest(self, message)
# Incoming handshake set request
def handleHandshake(self, message):
self.setHandshake(message["params"])
data = self.getHandshakeInfo()
data["cmd"] = "response"
data["to"] = message["req_id"]
self.send(data) # Send response to handshake
# Encrypt the incoming connection if a crypt method was agreed on
if self.crypt and not self.sock_wrapped:
server = (self.type == "in")
self.log("Crypt in connection using: %s (server side: %s)..." % (self.crypt, server))
try:
self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin)
self.sock_wrapped = True
except Exception as err:
if not config.force_encryption:
self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
self.server.broken_ssl_ips[self.ip] = True
self.close("Broken ssl")
if not self.sock_wrapped and self.cert_pin:
self.close("Crypt connection error: Socket not encrypted, but certificate pin present")
# Send data to connection
def send(self, message, streaming=False):
self.last_send_time = time.time()
if config.debug_socket:
self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % (
message.get("cmd"), message.get("to"), streaming,
message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"),
message.get("req_id"))
)
if not self.sock:
self.log("Send error: missing socket")
return False
if not self.connected and message.get("cmd") != "handshake":
self.log("Wait for handshake before send request")
self.event_connected.get()
try:
stat_key = message.get("cmd", "unknown")
if stat_key == "response":
stat_key = "response: %s" % self.last_cmd_recv
else:
self.server.num_sent += 1
self.server.stat_sent[stat_key]["num"] += 1
if streaming:
with self.send_lock:
bytes_sent = Msgpack.stream(message, self.sock.sendall)
self.bytes_sent += bytes_sent
self.server.bytes_sent += bytes_sent
self.server.stat_sent[stat_key]["bytes"] += bytes_sent
message = None
else:
data = Msgpack.pack(message)
self.bytes_sent += len(data)
self.server.bytes_sent += len(data)
self.server.stat_sent[stat_key]["bytes"] += len(data)
message = None
with self.send_lock:
self.sock.sendall(data)
except Exception as err:
self.close("Send error: %s (cmd: %s)" % (err, stat_key))
return False
self.last_sent_time = time.time()
return True
# Stream file to connection without msgpacking
def sendRawfile(self, file, read_bytes):
buff = 64 * 1024
bytes_left = read_bytes
bytes_sent = 0
while True:
self.last_send_time = time.time()
data = file.read(min(bytes_left, buff))
bytes_sent += len(data)
with self.send_lock:
self.sock.sendall(data)
bytes_left -= buff
if bytes_left <= 0:
break
self.bytes_sent += bytes_sent
self.server.bytes_sent += bytes_sent
self.server.stat_sent["raw_file"]["num"] += 1
self.server.stat_sent["raw_file"]["bytes"] += bytes_sent
return True
# Create and send a request to peer
def request(self, cmd, params={}, stream_to=None):
# Last command sent more than 10 sec ago, timeout
if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10:
self.close("Request %s timeout: %.3fs" % (self.last_cmd_sent, time.time() - self.last_send_time))
return False
self.last_req_time = time.time()
self.last_cmd_sent = cmd
self.req_id += 1
data = {"cmd": cmd, "req_id": self.req_id, "params": params}
event = gevent.event.AsyncResult() # Create new event for response
self.waiting_requests[self.req_id] = {"evt": event, "cmd": cmd}
if stream_to:
self.waiting_streams[self.req_id] = stream_to
self.send(data) # Send request
res = event.get() # Wait until event solves
return res
def ping(self):
s = time.time()
response = None
with gevent.Timeout(10.0, False):
try:
response = self.request("ping")
except Exception as err:
self.log("Ping error: %s" % Debug.formatException(err))
if response and "body" in response and response["body"] == b"Pong!":
self.last_ping_delay = time.time() - s
return True
else:
return False
# Close connection
def close(self, reason="Unknown"):
if self.closed:
return False # Already closed
self.closed = True
self.connected = False
if self.event_connected:
self.event_connected.set(False)
self.log(
"Closing connection: %s, waiting_requests: %s, sites: %s, buff: %s..." %
(reason, len(self.waiting_requests), self.sites, self.incomplete_buff_recv)
)
for request in self.waiting_requests.values(): # Mark pending requests failed
request["evt"].set(False)
self.waiting_requests = {}
self.waiting_streams = {}
self.sites = 0
self.server.removeConnection(self) # Remove connection from server registry
try:
if self.sock:
self.sock.shutdown(gevent.socket.SHUT_WR)
self.sock.close()
except Exception as err:
if config.debug_socket:
self.log("Close error: %s" % err)
# Little cleanup
self.sock = None
self.unpacker = None
self.event_connected = None
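A minimal sketch of driving a Connection by hand, assuming a running ConnectionServer (peer and site addresses are placeholders; in practice Peer objects wrap these calls). request() blocks on a gevent AsyncResult until handleMessage resolves it:

conn = Connection(server, "203.0.113.5", 15441)
conn.connect()   # TCP + optional TLS + handshake; spawns messageLoop
if conn.ping():  # sends a "ping" request and expects body b"Pong!"
    res = conn.request("getFile", {"site": "1SitePlaceholder", "inner_path": "content.json", "location": 0})
conn.close("Done")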

src/Connection/ConnectionServer.py

@@ -1,373 +0,0 @@
import logging
import time
import sys
import socket
from collections import defaultdict
import gevent
import msgpack
from gevent.server import StreamServer
from gevent.pool import Pool
import util
from util import helper
from Debug import Debug
from .Connection import Connection
from Config import config
from Crypt import CryptConnection
from Crypt import CryptHash
from Tor import TorManager
from Site import SiteManager
class ConnectionServer(object):
def __init__(self, ip=None, port=None, request_handler=None):
if not ip:
if config.fileserver_ip_type == "ipv6":
ip = "::1"
else:
ip = "127.0.0.1"
port = 15441
self.ip = ip
self.port = port
self.last_connection_id = 1 # Connection id incrementer
self.log = logging.getLogger("ConnServer")
self.port_opened = {}
self.peer_blacklist = SiteManager.peer_blacklist
self.tor_manager = TorManager(self.ip, self.port)
self.connections = [] # Connections
self.whitelist = config.ip_local # No flood protection for these ips
self.ip_incoming = {} # Incoming connections from ip in the last minute to avoid connection flood
self.broken_ssl_ips = {} # Ips of broken ssl connections
self.ips = {} # Connection by ip
self.has_internet = True # Internet outage detection
self.stream_server = None
self.stream_server_proxy = None
self.running = False
self.stopping = False
self.thread_checker = None
self.stat_recv = defaultdict(lambda: defaultdict(int))
self.stat_sent = defaultdict(lambda: defaultdict(int))
self.bytes_recv = 0
self.bytes_sent = 0
self.num_recv = 0
self.num_sent = 0
self.num_incoming = 0
self.num_outgoing = 0
self.had_external_incoming = False
self.timecorrection = 0.0
self.pool = Pool(500) # do not accept more than 500 connections
# Bittorrent style peerid
self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64")
# Check msgpack version
if msgpack.version[0] == 0 and msgpack.version[1] < 4:
self.log.error(
"Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" %
str(msgpack.version)
)
sys.exit(0)
if request_handler:
self.handleRequest = request_handler
def start(self, check_connections=True):
if self.stopping:
return False
self.running = True
if check_connections:
self.thread_checker = gevent.spawn(self.checkConnections)
CryptConnection.manager.loadCerts()
if config.tor != "disable":
self.tor_manager.start()
if not self.port:
self.log.info("No port found, not binding")
return False
self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % (
self.ip, self.port, ".".join(map(str, msgpack.version)),
CryptConnection.manager.crypt_supported
))
try:
self.stream_server = StreamServer(
(self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
)
except Exception as err:
self.log.info("StreamServer create error: %s" % Debug.formatException(err))
def listen(self):
if not self.running:
return None
if self.stream_server_proxy:
gevent.spawn(self.listenProxy)
try:
self.stream_server.serve_forever()
except Exception as err:
self.log.info("StreamServer listen error: %s" % err)
return False
self.log.debug("Stopped.")
def stop(self):
self.log.debug("Stopping %s" % self.stream_server)
self.stopping = True
self.running = False
if self.thread_checker:
gevent.kill(self.thread_checker)
if self.stream_server:
self.stream_server.stop()
def closeConnections(self):
self.log.debug("Closing all connection: %s" % len(self.connections))
for connection in self.connections[:]:
connection.close("Close all connections")
def handleIncomingConnection(self, sock, addr):
if config.offline:
sock.close()
return False
ip, port = addr[0:2]
ip = ip.lower()
if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping
ip = ip.replace("::ffff:", "", 1)
self.num_incoming += 1
if not self.had_external_incoming and not helper.isPrivateIp(ip):
self.had_external_incoming = True
# Connection flood protection
if ip in self.ip_incoming and ip not in self.whitelist:
self.ip_incoming[ip] += 1
if self.ip_incoming[ip] > 6: # Allow 6 in 1 minute from same ip
self.log.debug("Connection flood detected from %s" % ip)
time.sleep(30)
sock.close()
return False
else:
self.ip_incoming[ip] = 1
connection = Connection(self, ip, port, sock)
self.connections.append(connection)
if ip not in config.ip_local:
self.ips[ip] = connection
connection.handleIncomingConnection(sock)
def handleMessage(self, *args, **kwargs):
pass
def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None, is_tracker_connection=False):
ip_type = helper.getIpType(ip)
has_per_site_onion = (ip.endswith(".onion") or self.port_opened.get(ip_type, None) == False) and self.tor_manager.start_onions and site
if has_per_site_onion: # Site-unique connection for Tor
if ip.endswith(".onion"):
site_onion = self.tor_manager.getOnion(site.address)
else:
site_onion = self.tor_manager.getOnion("global")
key = ip + site_onion
else:
key = ip
# Find connection by ip
if key in self.ips:
connection = self.ips[key]
if not peer_id or connection.handshake.get("peer_id") == peer_id: # Filter by peer_id
if not connection.connected and create:
succ = connection.event_connected.get() # Wait for connection
if not succ:
raise Exception("Connection event return error")
return connection
# Recover from connection pool
for connection in self.connections:
if connection.ip == ip:
if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match
continue
if ip.endswith(".onion") and self.tor_manager.start_onions and ip.replace(".onion", "") != connection.target_onion:
# For different site
continue
if not connection.connected and create:
succ = connection.event_connected.get() # Wait for connection
if not succ:
raise Exception("Connection event return error")
return connection
# No connection found
if create and not config.offline: # Create a new connection if none was found
if port == 0:
raise Exception("This peer is not connectable")
if (ip, port) in self.peer_blacklist and not is_tracker_connection:
raise Exception("This peer is blacklisted")
try:
if has_per_site_onion: # Lock connection to site
connection = Connection(self, ip, port, target_onion=site_onion, is_tracker_connection=is_tracker_connection)
else:
connection = Connection(self, ip, port, is_tracker_connection=is_tracker_connection)
self.num_outgoing += 1
self.ips[key] = connection
self.connections.append(connection)
connection.log("Connecting... (site: %s)" % site)
succ = connection.connect()
if not succ:
connection.close("Connection event return error")
raise Exception("Connection event return error")
except Exception as err:
connection.close("%s Connect error: %s" % (ip, Debug.formatException(err)))
raise err
if len(self.connections) > config.global_connected_limit:
gevent.spawn(self.checkMaxConnections)
return connection
else:
return None
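# For illustration (address hypothetical): connections are cached in self.ips, keyed
# by ip (plus the site's onion when per-site onions are active), so repeated calls
# return the same object instead of dialing again:
#   conn = server.getConnection("203.0.113.5", 15441, site=site)   # dials out
#   same = server.getConnection("203.0.113.5", 15441, site=site)   # cache hit
#   assert conn is same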
def removeConnection(self, connection):
# Delete if same as in registry
if self.ips.get(connection.ip) == connection:
del self.ips[connection.ip]
# Site locked connection
if connection.target_onion:
if self.ips.get(connection.ip + connection.target_onion) == connection:
del self.ips[connection.ip + connection.target_onion]
# Cert pinned connection
if connection.cert_pin and self.ips.get(connection.ip + "#" + connection.cert_pin) == connection:
del self.ips[connection.ip + "#" + connection.cert_pin]
if connection in self.connections:
self.connections.remove(connection)
def checkConnections(self):
run_i = 0
time.sleep(15)
while self.running:
run_i += 1
self.ip_incoming = {} # Reset connected ips counter
last_message_time = 0
s = time.time()
for connection in self.connections[:]: # Make a copy
if connection.ip.endswith(".onion") or config.tor == "always":
timeout_multipler = 2
else:
timeout_multipler = 1
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
if connection.last_message_time > last_message_time and not connection.is_private_ip:
# A message from a local IP does not mean the internet connection is up
last_message_time = connection.last_message_time
if connection.unpacker and idle > 30:
# Delete the unpacker if not needed
del connection.unpacker
connection.unpacker = None
elif connection.last_cmd_sent == "announce" and idle > 20: # Bootstrapper connection close after 20 sec
connection.close("[Cleanup] Tracker connection, idle: %.3fs" % idle)
if idle > 60 * 60:
# Machine probably woke up from sleep: close connections idle for over an hour
connection.close("[Cleanup] After wakeup, idle: %.3fs" % idle)
elif idle > 20 * 60 and connection.last_send_time < time.time() - 10:
# Idle more than 20 min and we have not sent request in last 10 sec
if not connection.ping():
connection.close("[Cleanup] Ping timeout")
elif idle > 10 * timeout_multiplier and connection.incomplete_buff_recv > 0:
# Incomplete data with more than 10 sec idle
connection.close("[Cleanup] Connection buff stalled")
elif idle > 10 * timeout_multiplier and connection.protocol == "?": # No connection after 10 sec
connection.close(
"[Cleanup] Connect timeout: %.3fs" % idle
)
elif idle > 10 * timeout_multiplier and connection.waiting_requests and time.time() - connection.last_send_time > 10 * timeout_multiplier:
# Sent command and no response in 10 sec
connection.close(
"[Cleanup] Command %s timeout: %.3fs" % (connection.last_cmd_sent, time.time() - connection.last_send_time)
)
elif idle < 60 and connection.bad_actions > 40:
connection.close(
"[Cleanup] Too many bad actions: %s" % connection.bad_actions
)
elif idle > 5 * 60 and connection.sites == 0:
connection.close(
"[Cleanup] No site for connection"
)
elif run_i % 90 == 0:
# Reset the bad action counter every 90 check cycles (~22 minutes)
connection.bad_actions = 0
# Internet outage detection
if time.time() - last_message_time > max(60, 60 * 10 / max(1, float(len(self.connections)) / 50)):
# Offline: last message was more than 60-600 sec ago, depending on the number of connections
if self.has_internet and last_message_time:
self.has_internet = False
self.onInternetOffline()
else:
# Online
if not self.has_internet:
self.has_internet = True
self.onInternetOnline()
self.timecorrection = self.getTimecorrection()
if time.time() - s > 0.01:
self.log.debug("Connection cleanup in %.3fs" % (time.time() - s))
time.sleep(15)
self.log.debug("Checkconnections ended")
@util.Noparallel(blocking=False)
def checkMaxConnections(self):
if len(self.connections) < config.global_connected_limit:
return 0
s = time.time()
num_connected_before = len(self.connections)
self.connections.sort(key=lambda connection: connection.sites)
num_closed = 0
for connection in self.connections:
idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time)
if idle > 60:
connection.close("Connection limit reached")
num_closed += 1
if num_closed > config.global_connected_limit * 0.1:
break
self.log.debug("Closed %s connections of %s after reached limit %s in %.3fs" % (
num_closed, num_connected_before, config.global_connected_limit, time.time() - s
))
return num_closed
def onInternetOnline(self):
self.log.info("Internet online")
def onInternetOffline(self):
self.had_external_incoming = False
self.log.info("Internet offline")
def getTimecorrection(self):
corrections = sorted([
connection.handshake.get("time") - connection.handshake_time + connection.last_ping_delay
for connection in self.connections
if connection.handshake.get("time") and connection.last_ping_delay
])
if len(corrections) < 9:
return 0.0
mid = int(len(corrections) / 2 - 1)
median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
return median
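
For reference, a minimal self-contained sketch of the offset estimate computed above: sort the per-connection clock offsets, then average the three middle values (only once at least 9 samples exist), which damps outlier peer clocks. Values below are made up for the example.

corrections = sorted([-1.2, -0.4, 0.0, 0.1, 0.2, 0.3, 0.5, 0.9, 2.0])
mid = int(len(corrections) / 2 - 1)  # index 3 for 9 samples
median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3
print("Time correction: %.3fs" % median)  # ~0.100s despite the 2.0s outlier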


@ -1,2 +0,0 @@
from .ConnectionServer import ConnectionServer
from .Connection import Connection


@ -1,162 +0,0 @@
import os
from Db.Db import Db, DbTableError
from Config import config
from Plugin import PluginManager
from Debug import Debug
@PluginManager.acceptPlugins
class ContentDb(Db):
def __init__(self, path):
Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path)
self.foreign_keys = True
def init(self):
try:
self.schema = self.getSchema()
try:
self.checkTables()
except DbTableError:
pass
self.log.debug("Checking foreign keys...")
foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone()
if foreign_key_error:
raise Exception("Database foreign key error: %s" % foreign_key_error)
except Exception as err:
self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
self.close()
os.unlink(self.db_path) # Remove and try again
Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path)
self.foreign_keys = True
self.schema = self.getSchema()
try:
self.checkTables()
except DbTableError:
pass
self.site_ids = {}
self.sites = {}
def getSchema(self):
schema = {}
schema["db_name"] = "ContentDb"
schema["version"] = 3
schema["tables"] = {}
if not self.getTableVersion("site"):
self.log.debug("Migrating from table version-less content.db")
version = int(self.execute("PRAGMA user_version").fetchone()[0])
if version > 0:
self.checkTables()
self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.site.version", "value": 1})
self.execute("INSERT INTO keyvalue ?", {"json_id": 0, "key": "table.content.version", "value": 1})
schema["tables"]["site"] = {
"cols": [
["site_id", "INTEGER PRIMARY KEY ASC NOT NULL UNIQUE"],
["address", "TEXT NOT NULL"]
],
"indexes": [
"CREATE UNIQUE INDEX site_address ON site (address)"
],
"schema_changed": 1
}
schema["tables"]["content"] = {
"cols": [
["content_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"],
["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
["inner_path", "TEXT"],
["size", "INTEGER"],
["size_files", "INTEGER"],
["size_files_optional", "INTEGER"],
["modified", "INTEGER"]
],
"indexes": [
"CREATE UNIQUE INDEX content_key ON content (site_id, inner_path)",
"CREATE INDEX content_modified ON content (site_id, modified)"
],
"schema_changed": 1
}
return schema
def initSite(self, site):
self.sites[site.address] = site
def needSite(self, site):
if site.address not in self.site_ids:
self.execute("INSERT OR IGNORE INTO site ?", {"address": site.address})
self.site_ids = {}
for row in self.execute("SELECT * FROM site"):
self.site_ids[row["address"]] = row["site_id"]
return self.site_ids[site.address]
def deleteSite(self, site):
site_id = self.site_ids.get(site.address, 0)
if site_id:
self.execute("DELETE FROM site WHERE site_id = :site_id", {"site_id": site_id})
del self.site_ids[site.address]
del self.sites[site.address]
def setContent(self, site, inner_path, content, size=0):
self.insertOrUpdate("content", {
"size": size,
"size_files": sum([val["size"] for key, val in content.get("files", {}).items()]),
"size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]),
"modified": int(content.get("modified", 0))
}, {
"site_id": self.site_ids.get(site.address, 0),
"inner_path": inner_path
})
def deleteContent(self, site, inner_path):
self.execute("DELETE FROM content WHERE ?", {"site_id": self.site_ids.get(site.address, 0), "inner_path": inner_path})
def loadDbDict(self, site):
res = self.execute(
"SELECT GROUP_CONCAT(inner_path, '|') AS inner_paths FROM content WHERE ?",
{"site_id": self.site_ids.get(site.address, 0)}
)
row = res.fetchone()
if row and row["inner_paths"]:
inner_paths = row["inner_paths"].split("|")
return dict.fromkeys(inner_paths, False)
else:
return {}
def getTotalSize(self, site, ignore=None):
params = {"site_id": self.site_ids.get(site.address, 0)}
if ignore:
params["not__inner_path"] = ignore
res = self.execute("SELECT SUM(size) + SUM(size_files) AS size, SUM(size_files_optional) AS size_optional FROM content WHERE ?", params)
row = dict(res.fetchone())
if not row["size"]:
row["size"] = 0
if not row["size_optional"]:
row["size_optional"] = 0
return row["size"], row["size_optional"]
def listModified(self, site, after=None, before=None):
params = {"site_id": self.site_ids.get(site.address, 0)}
if after:
params["modified>"] = after
if before:
params["modified<"] = before
res = self.execute("SELECT inner_path, modified FROM content WHERE ?", params)
return {row["inner_path"]: row["modified"] for row in res}
content_dbs = {}
def getContentDb(path=None):
if not path:
path = "%s/content.db" % config.data_dir
if path not in content_dbs:
content_dbs[path] = ContentDb(path)
content_dbs[path].init()
return content_dbs[path]
getContentDb() # Pre-connect to default one
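
A hypothetical usage sketch of the module-level connection cache above (the package path is an assumption based on the deleted src layout):

from Content.ContentDb import getContentDb  # assumed package path

db = getContentDb()           # default path: "%s/content.db" % config.data_dir
assert db is getContentDb()   # repeated calls reuse the cached, initialized instance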


@ -1,155 +0,0 @@
import time
import os
from . import ContentDb
from Debug import Debug
from Config import config
class ContentDbDict(dict):
def __init__(self, site, *args, **kwargs):
s = time.time()
self.site = site
self.cached_keys = []
self.log = self.site.log
self.db = ContentDb.getContentDb()
self.db_id = self.db.needSite(site)
self.num_loaded = 0
super(ContentDbDict, self).__init__(self.db.loadDbDict(site)) # Load keys from database
self.log.debug("ContentDb init: %.3fs, found files: %s, sites: %s" % (time.time() - s, len(self), len(self.db.site_ids)))
def loadItem(self, key):
try:
self.num_loaded += 1
if self.num_loaded % 100 == 0:
if config.verbose:
self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack()))
else:
self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key))
content = self.site.storage.loadJson(key)
dict.__setitem__(self, key, content)
except IOError:
if dict.get(self, key):
self.__delitem__(key) # File no longer exists
raise KeyError(key)
self.addCachedKey(key)
self.checkLimit()
return content
def getItemSize(self, key):
return self.site.storage.getSize(key)
# Only keep the 10 most recently accessed json files in memory
def checkLimit(self):
if len(self.cached_keys) > 10:
key_deleted = self.cached_keys.pop(0)
dict.__setitem__(self, key_deleted, False)
def addCachedKey(self, key):
if key not in self.cached_keys and key != "content.json" and len(key) > 40: # Keys shorter than 40 chars (and content.json) are always kept in memory
self.cached_keys.append(key)
def __getitem__(self, key):
val = dict.get(self, key)
if val: # Already loaded
return val
elif val is None: # Unknown key
raise KeyError(key)
elif val is False: # Loaded before, but purged from cache
return self.loadItem(key)
def __setitem__(self, key, val):
self.addCachedKey(key)
self.checkLimit()
size = self.getItemSize(key)
self.db.setContent(self.site, key, val, size)
dict.__setitem__(self, key, val)
def __delitem__(self, key):
self.db.deleteContent(self.site, key)
dict.__delitem__(self, key)
try:
self.cached_keys.remove(key)
except ValueError:
pass
def iteritems(self):
for key in dict.keys(self):
try:
val = self[key]
except Exception as err:
self.log.warning("Error loading %s: %s" % (key, err))
continue
yield key, val
def items(self):
back = []
for key in dict.keys(self):
try:
val = self[key]
except Exception as err:
self.log.warning("Error loading %s: %s" % (key, err))
continue
back.append((key, val))
return back
def values(self):
back = []
for key, val in dict.items(self):
if not val:
try:
val = self.loadItem(key)
except Exception:
continue
back.append(val)
return back
def get(self, key, default=None):
try:
return self.__getitem__(key)
except KeyError:
return default
except Exception as err:
self.site.bad_files[key] = self.site.bad_files.get(key, 1)
dict.__delitem__(self, key)
self.log.warning("Error loading %s: %s" % (key, err))
return default
def execute(self, query, params={}):
params["site_id"] = self.db_id
return self.db.execute(query, params)
if __name__ == "__main__":
import psutil
process = psutil.Process(os.getpid())
s_mem = process.memory_info()[0] / float(2 ** 20)
root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27"
contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root)
print("Init len", len(contents))
s = time.time()
for dir_name in os.listdir(root + "/data/users/")[0:8000]:
contents["data/users/%s/content.json" % dir_name]
print("Load: %.3fs" % (time.time() - s))
s = time.time()
found = 0
for key, val in contents.items():
found += 1
assert key
assert val
print("Found:", found)
print("Iteritem: %.3fs" % (time.time() - s))
s = time.time()
found = 0
for key in list(contents.keys()):
found += 1
assert key in contents
print("In: %.3fs" % (time.time() - s))
print("Len:", len(list(contents.values())), len(list(contents.keys())))
print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem)

File diff suppressed because it is too large


@ -1 +0,0 @@
from .ContentManager import ContentManager


@ -1,4 +0,0 @@
from Config import config
from util import ThreadPool
thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt)


@ -1,101 +0,0 @@
import logging
import base64
import binascii
import time
import hashlib
from util.Electrum import dbl_format
from Config import config
import util.OpensslFindPatch
lib_verify_best = "sslcrypto"
from lib import sslcrypto
sslcurve_native = sslcrypto.ecc.get_curve("secp256k1")
sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1")
sslcurve = sslcurve_native
def loadLib(lib_name, silent=False):
global sslcurve, libsecp256k1message, lib_verify_best
if lib_name == "libsecp256k1":
s = time.time()
from lib import libsecp256k1message
import coincurve
lib_verify_best = "libsecp256k1"
if not silent:
logging.info(
"Libsecpk256k1 loaded: %s in %.3fs" %
(type(coincurve._libsecp256k1.lib).__name__, time.time() - s)
)
elif lib_name == "sslcrypto":
sslcurve = sslcurve_native
if sslcurve_native == sslcurve_fallback:
logging.warning("SSLCurve fallback loaded instead of native")
elif lib_name == "sslcrypto_fallback":
sslcurve = sslcurve_fallback
try:
if not config.use_libsecp256k1:
raise Exception("Disabled by config")
loadLib("libsecp256k1")
lib_verify_best = "libsecp256k1"
except Exception as err:
logging.info("Libsecp256k1 load failed: %s" % err)
def newPrivatekey(): # Return new private key
return sslcurve.private_to_wif(sslcurve.new_private_key()).decode()
def newSeed():
return binascii.hexlify(sslcurve.new_private_key()).decode()
def hdPrivatekey(seed, child):
# A too-large child id could cause problems
privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000)
return sslcurve.private_to_wif(privatekey_bin).decode()
def privatekeyToAddress(privatekey): # Return address from private key
try:
if len(privatekey) == 64:
privatekey_bin = bytes.fromhex(privatekey)
else:
privatekey_bin = sslcurve.wif_to_private(privatekey.encode())
return sslcurve.private_to_address(privatekey_bin).decode()
except Exception: # Invalid privatekey
return False
def sign(data, privatekey): # Return signature for data using private key
if privatekey.startswith("23") and len(privatekey) > 52:
return None # Old style private key not supported
return base64.b64encode(sslcurve.sign(
data.encode(),
sslcurve.wif_to_private(privatekey.encode()),
recoverable=True,
hash=dbl_format
)).decode()
def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and signature
if not lib_verify:
lib_verify = lib_verify_best
if not sign:
return False
if lib_verify == "libsecp256k1":
sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8")
elif lib_verify in ("sslcrypto", "sslcrypto_fallback"):
publickey = sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format)
sign_address = sslcurve.public_to_address(publickey).decode()
else:
raise Exception("No library enabled for signature verification")
if type(valid_address) is list: # Any address in the list
return sign_address in valid_address
else: # One possible address
return sign_address == valid_address
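
A hypothetical round trip through the helpers above, using a fresh throwaway key so nothing secret is involved (the package path is an assumption based on the deleted src layout):

from Crypt import CryptBitcoin  # assumed package path

privatekey = CryptBitcoin.newPrivatekey()              # WIF-encoded secp256k1 key
address = CryptBitcoin.privatekeyToAddress(privatekey)
sign = CryptBitcoin.sign("hello", privatekey)          # base64, recoverable signature
assert CryptBitcoin.verify("hello", address, sign)     # recovered address matches
assert CryptBitcoin.verify("hello", [address, "1SomeOtherAddress"], sign)  # list form: any match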


@ -1,217 +0,0 @@
import sys
import logging
import os
import ssl
import hashlib
import random
from Config import config
from util import helper
class CryptConnectionManager:
def __init__(self):
if config.openssl_bin_file:
self.openssl_bin = config.openssl_bin_file
elif sys.platform.startswith("win"):
self.openssl_bin = "tools\\openssl\\openssl.exe"
elif config.dist_type.startswith("bundle_linux"):
self.openssl_bin = "../runtime/bin/openssl"
else:
self.openssl_bin = "openssl"
self.context_client = None
self.context_server = None
self.openssl_conf_template = "src/lib/openssl/openssl.cnf"
self.openssl_conf = config.data_dir + "/openssl.cnf"
self.openssl_env = {
"OPENSSL_CONF": self.openssl_conf,
"RANDFILE": config.data_dir + "/openssl-rand.tmp"
}
self.crypt_supported = [] # Supported cryptos
self.cacert_pem = config.data_dir + "/cacert-rsa.pem"
self.cakey_pem = config.data_dir + "/cakey-rsa.pem"
self.cert_pem = config.data_dir + "/cert-rsa.pem"
self.cert_csr = config.data_dir + "/cert-rsa.csr"
self.key_pem = config.data_dir + "/key-rsa.pem"
self.log = logging.getLogger("CryptConnectionManager")
self.log.debug("Version: %s" % ssl.OPENSSL_VERSION)
self.fakedomains = [
"yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com",
"amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com",
"godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp",
"adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com",
"networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com",
"nazwa.pl", "symantec.com"
]
def createSslContexts(self):
if self.context_server and self.context_client:
return False
ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:"
ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
if hasattr(ssl, "PROTOCOL_TLS"):
protocol = ssl.PROTOCOL_TLS
else:
protocol = ssl.PROTOCOL_TLSv1_2
self.context_client = ssl.SSLContext(protocol)
self.context_client.check_hostname = False
self.context_client.verify_mode = ssl.CERT_NONE
self.context_server = ssl.SSLContext(protocol)
self.context_server.load_cert_chain(self.cert_pem, self.key_pem)
for ctx in (self.context_client, self.context_server):
ctx.set_ciphers(ciphers)
ctx.options |= ssl.OP_NO_COMPRESSION
try:
ctx.set_alpn_protocols(["h2", "http/1.1"])
ctx.set_npn_protocols(["h2", "http/1.1"])
except Exception:
pass
# Select a crypt supported by both sides
# Return: Name of the crypto
def selectCrypt(self, client_supported):
for crypt in self.crypt_supported:
if crypt in client_supported:
return crypt
return False
# Wrap socket for crypt
# Return: wrapped socket
def wrapSocket(self, sock, crypt, server=False, cert_pin=None):
if crypt == "tls-rsa":
if server:
sock_wrapped = self.context_server.wrap_socket(sock, server_side=True)
else:
sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains))
if cert_pin:
cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest()
if cert_hash != cert_pin:
raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin))
return sock_wrapped
else:
return sock
def removeCerts(self):
if config.keep_ssl_cert:
return False
for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]:
file_path = "%s/%s" % (config.data_dir, file_name)
if os.path.isfile(file_path):
os.unlink(file_path)
# Load and create cert files if necessary
def loadCerts(self):
if config.disable_encryption:
return False
if self.createSslRsaCert() and "tls-rsa" not in self.crypt_supported:
self.crypt_supported.append("tls-rsa")
# Try to create RSA server cert + sign for connection encryption
# Return: True on success
def createSslRsaCert(self):
casubjects = [
"/C=US/O=Amazon/OU=Server CA 1B/CN=Amazon",
"/C=US/O=Let's Encrypt/CN=Let's Encrypt Authority X3",
"/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA",
"/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA"
]
self.openssl_env['CN'] = random.choice(self.fakedomains)
if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
self.createSslContexts()
return True # Files already exist
import subprocess
# Replace variables in config template
conf_template = open(self.openssl_conf_template).read()
conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN'])
open(self.openssl_conf, "w").write(conf_template)
# Generate CAcert and CAkey
cmd_params = helper.shellquote(
self.openssl_bin,
self.openssl_conf,
random.choice(casubjects),
self.cakey_pem,
self.cacert_pem
)
cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params
self.log.debug("Generating RSA CAcert and CAkey PEM files...")
self.log.debug("Running: %s" % cmd)
proc = subprocess.Popen(
cmd, shell=True, stderr=subprocess.STDOUT,
stdout=subprocess.PIPE, env=self.openssl_env
)
back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
proc.wait()
if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)):
self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. (%s)" % back)
return False
else:
self.log.debug("Result: %s" % back)
# Generate certificate key and signing request
cmd_params = helper.shellquote(
self.openssl_bin,
self.key_pem,
self.cert_csr,
"/CN=" + self.openssl_env['CN'],
self.openssl_conf,
)
cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params
self.log.debug("Generating certificate key and signing request...")
proc = subprocess.Popen(
cmd, shell=True, stderr=subprocess.STDOUT,
stdout=subprocess.PIPE, env=self.openssl_env
)
back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
proc.wait()
self.log.debug("Running: %s\n%s" % (cmd, back))
# Sign request and generate certificate
cmd_params = helper.shellquote(
self.openssl_bin,
self.cert_csr,
self.cacert_pem,
self.cakey_pem,
self.cert_pem,
self.openssl_conf
)
cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params
self.log.debug("Generating RSA cert...")
proc = subprocess.Popen(
cmd, shell=True, stderr=subprocess.STDOUT,
stdout=subprocess.PIPE, env=self.openssl_env
)
back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "")
proc.wait()
self.log.debug("Running: %s\n%s" % (cmd, back))
if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem):
self.createSslContexts()
# Remove no longer necessary files
os.unlink(self.openssl_conf)
os.unlink(self.cacert_pem)
os.unlink(self.cakey_pem)
os.unlink(self.cert_csr)
return True
else:
self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.")
manager = CryptConnectionManager()
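
A hypothetical server-side handshake sketch using the manager above; socket setup is omitted and the package path is an assumption based on the deleted src layout:

from Crypt import CryptConnection  # assumed package path

CryptConnection.manager.loadCerts()                       # generates the RSA cert on first run
crypt = CryptConnection.manager.selectCrypt(["tls-rsa"])  # -> "tls-rsa" if supported, else False
# With an accepted socket `sock`, the encrypted wrap would then be:
# sock = CryptConnection.manager.wrapSocket(sock, crypt, server=True)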


@ -1,56 +0,0 @@
import hashlib
import os
import base64
def sha512sum(file, blocksize=65536, format="hexdigest"):
if type(file) is str: # Filename specified
file = open(file, "rb")
hash = hashlib.sha512()
for block in iter(lambda: file.read(blocksize), b""):
hash.update(block)
# Truncating to 256 bits is good enough
if format == "hexdigest":
return hash.hexdigest()[0:64]
else:
return hash.digest()[0:32]
def sha256sum(file, blocksize=65536):
if type(file) is str: # Filename specified
file = open(file, "rb")
hash = hashlib.sha256()
for block in iter(lambda: file.read(blocksize), b""):
hash.update(block)
return hash.hexdigest()
def random(length=64, encoding="hex"):
if encoding == "base64": # Characters: A-Za-z0-9
hash = hashlib.sha512(os.urandom(256)).digest()
return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
else: # Characters: a-f0-9 (faster)
return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
# Sha512 truncated to 256 bits
class Sha512t:
def __init__(self, data):
if data:
self.sha512 = hashlib.sha512(data)
else:
self.sha512 = hashlib.sha512()
def hexdigest(self):
return self.sha512.hexdigest()[0:64]
def digest(self):
return self.sha512.digest()[0:32]
def update(self, data):
return self.sha512.update(data)
def sha512t(data=None):
return Sha512t(data)
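
A short illustration of the truncated-sha512 helpers above (the package path is an assumption based on the deleted src layout):

from Crypt import CryptHash  # assumed package path

h = CryptHash.sha512t(b"hello")
print(h.hexdigest())                   # 64 hex chars: sha512 truncated to 256 bits
print(CryptHash.random(12))            # 12 random hex chars
print(CryptHash.random(12, "base64"))  # 12 chars drawn from A-Za-z0-9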


@ -1,38 +0,0 @@
import base64
import hashlib
def sign(data, privatekey):
import rsa
from rsa import pkcs1
if "BEGIN RSA PRIVATE KEY" not in privatekey:
privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey
priv = rsa.PrivateKey.load_pkcs1(privatekey)
sign = rsa.pkcs1.sign(data, priv, 'SHA-256')
return sign
def verify(data, publickey, sign):
import rsa
from rsa import pkcs1
pub = rsa.PublicKey.load_pkcs1(publickey, format="DER")
try:
valid = rsa.pkcs1.verify(data, sign, pub)
except pkcs1.VerificationError:
valid = False
return valid
def privatekeyToPublickey(privatekey):
import rsa
from rsa import pkcs1
if "BEGIN RSA PRIVATE KEY" not in privatekey:
privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey
priv = rsa.PrivateKey.load_pkcs1(privatekey)
pub = rsa.PublicKey(priv.n, priv.e)
return pub.save_pkcs1("DER")
def publickeyToOnion(publickey):
return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii")
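
A hypothetical call to the onion derivation above; the input bytes are fake, while real use would pass the DER public key returned by privatekeyToPublickey (the package path is an assumption based on the deleted src layout):

from Crypt import CryptRsa  # assumed package path

onion = CryptRsa.publickeyToOnion(b"fake DER public key bytes")
print(onion)  # 16 lowercase base32 chars: sha1(publickey) truncated to 10 bytes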



@ -1,519 +0,0 @@
import sqlite3
import json
import time
import logging
import re
import os
import atexit
import threading
import sys
import weakref
import errno
import gevent
from Debug import Debug
from .DbCursor import DbCursor
from util import SafeRe
from util import helper
from util import ThreadPool
from Config import config
thread_pool_db = ThreadPool.ThreadPool(config.threads_db)
next_db_id = 0
opened_dbs = []
# Close idle databases to save some memory
def dbCleanup():
while 1:
time.sleep(60 * 5)
for db in opened_dbs[:]:
idle = time.time() - db.last_query_time
if idle > 60 * 5 and db.close_idle:
db.close("Cleanup")
def dbCommitCheck():
while 1:
time.sleep(5)
for db in opened_dbs[:]:
if not db.need_commit:
continue
success = db.commit("Interval")
if success:
db.need_commit = False
time.sleep(0.1)
def dbCloseAll():
for db in opened_dbs[:]:
db.close("Close all")
gevent.spawn(dbCleanup)
gevent.spawn(dbCommitCheck)
atexit.register(dbCloseAll)
class DbTableError(Exception):
def __init__(self, message, table):
super().__init__(message)
self.table = table
class Db(object):
def __init__(self, schema, db_path, close_idle=False):
global next_db_id
self.db_path = db_path
self.db_dir = os.path.dirname(db_path) + "/"
self.schema = schema
self.schema["version"] = self.schema.get("version", 1)
self.conn = None
self.cur = None
self.cursors = weakref.WeakSet()
self.id = next_db_id
next_db_id += 1
self.progress_sleeping = False
self.commiting = False
self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"]))
self.table_names = None
self.collect_stats = False
self.foreign_keys = False
self.need_commit = False
self.query_stats = {}
self.db_keyvalues = {}
self.delayed_queue = []
self.delayed_queue_thread = None
self.close_idle = close_idle
self.last_query_time = time.time()
self.last_sleep_time = time.time()
self.num_execute_since_sleep = 0
self.lock = ThreadPool.Lock()
self.connect_lock = ThreadPool.Lock()
def __repr__(self):
return "<Db#%s:%s close_idle:%s>" % (id(self), self.db_path, self.close_idle)
def connect(self):
self.connect_lock.acquire(True)
try:
if self.conn:
self.log.debug("Already connected, connection ignored")
return
if self not in opened_dbs:
opened_dbs.append(self)
s = time.time()
try: # Create the directory if it does not exist yet
os.makedirs(self.db_dir)
self.log.debug("Created Db path: %s" % self.db_dir)
except OSError as err:
if err.errno != errno.EEXIST:
raise err
if not os.path.isfile(self.db_path):
self.log.debug("Db file not exist yet: %s" % self.db_path)
self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False)
self.conn.row_factory = sqlite3.Row
self.conn.set_progress_handler(self.progress, 5000000)
self.conn.execute('PRAGMA journal_mode=WAL')
if self.foreign_keys:
self.conn.execute("PRAGMA foreign_keys = ON")
self.cur = self.getCursor()
self.log.debug(
"Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." %
(self.db_path, time.time() - s, len(opened_dbs), sqlite3.version)
)
self.log.debug("Connect by thread: %s" % threading.current_thread().ident)
self.log.debug("Connect called by %s" % Debug.formatStack())
finally:
self.connect_lock.release()
def getConn(self):
if not self.conn:
self.connect()
return self.conn
def progress(self, *args, **kwargs):
self.progress_sleeping = True
time.sleep(0.001)
self.progress_sleeping = False
# Execute query using dbcursor
def execute(self, query, params=None):
if not self.conn:
self.connect()
return self.cur.execute(query, params)
@thread_pool_db.wrap
def commit(self, reason="Unknown"):
if self.progress_sleeping:
self.log.debug("Commit ignored: Progress sleeping")
return False
if not self.conn:
self.log.debug("Commit ignored: No connection")
return False
if self.commiting:
self.log.debug("Commit ignored: Already commiting")
return False
try:
s = time.time()
self.commiting = True
self.conn.commit()
self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason))
return True
except Exception as err:
if "SQL statements in progress" in str(err):
self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason))
else:
self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason))
return False
finally:
self.commiting = False
def insertOrUpdate(self, *args, **kwargs):
if not self.conn:
self.connect()
return self.cur.insertOrUpdate(*args, **kwargs)
def executeDelayed(self, *args, **kwargs):
if not self.delayed_queue_thread:
self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed)
self.delayed_queue.append(("execute", (args, kwargs)))
def insertOrUpdateDelayed(self, *args, **kwargs):
if not self.delayed_queue_thread:
self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed)
self.delayed_queue.append(("insertOrUpdate", (args, kwargs)))
def processDelayed(self):
if not self.delayed_queue:
self.log.debug("processDelayed aborted")
return
if not self.conn:
self.connect()
s = time.time()
cur = self.getCursor()
for command, params in self.delayed_queue:
if command == "insertOrUpdate":
cur.insertOrUpdate(*params[0], **params[1])
else:
cur.execute(*params[0], **params[1])
if len(self.delayed_queue) > 10:
self.log.debug("Processed %s delayed queue in %.3fs" % (len(self.delayed_queue), time.time() - s))
self.delayed_queue = []
self.delayed_queue_thread = None
def close(self, reason="Unknown"):
if not self.conn:
return False
self.connect_lock.acquire()
s = time.time()
if self.delayed_queue:
self.processDelayed()
if self in opened_dbs:
opened_dbs.remove(self)
self.need_commit = False
self.commit("Closing: %s" % reason)
self.log.debug("Close called by %s" % Debug.formatStack())
for i in range(5):
if len(self.cursors) == 0:
break
self.log.debug("Pending cursors: %s" % len(self.cursors))
time.sleep(0.1 * i)
if len(self.cursors):
self.log.debug("Killing cursors: %s" % len(self.cursors))
self.conn.interrupt()
if self.cur:
self.cur.close()
if self.conn:
ThreadPool.main_loop.call(self.conn.close)
self.conn = None
self.cur = None
self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs)))
self.connect_lock.release()
return True
# Gets a cursor object to database
# Return: Cursor class
def getCursor(self):
if not self.conn:
self.connect()
cur = DbCursor(self)
return cur
def getSharedCursor(self):
if not self.conn:
self.connect()
return self.cur
# Get the table version
# Return: Table version or None if not exist
def getTableVersion(self, table_name):
if not self.db_keyvalues: # Get db keyvalues
try:
res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues
except sqlite3.OperationalError as err: # Table does not exist
self.log.debug("Query table version error: %s" % err)
return False
for row in res:
self.db_keyvalues[row["key"]] = row["value"]
return self.db_keyvalues.get("table.%s.version" % table_name, 0)
# Check Db tables
# Return: <list> Changed table names
def checkTables(self):
s = time.time()
changed_tables = []
cur = self.getSharedCursor()
# Check internal tables
# Check keyvalue table
changed = cur.needTable("keyvalue", [
["keyvalue_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["key", "TEXT"],
["value", "INTEGER"],
["json_id", "INTEGER"],
], [
"CREATE UNIQUE INDEX key_id ON keyvalue(json_id, key)"
], version=self.schema["version"])
if changed:
changed_tables.append("keyvalue")
# Create json table if no custom one defined
if "json" not in self.schema.get("tables", {}):
if self.schema["version"] == 1:
changed = cur.needTable("json", [
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["path", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(path)"
], version=self.schema["version"])
elif self.schema["version"] == 2:
changed = cur.needTable("json", [
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["directory", "VARCHAR(255)"],
["file_name", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(directory, file_name)"
], version=self.schema["version"])
elif self.schema["version"] == 3:
changed = cur.needTable("json", [
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"],
["site", "VARCHAR(255)"],
["directory", "VARCHAR(255)"],
["file_name", "VARCHAR(255)"]
], [
"CREATE UNIQUE INDEX path ON json(directory, site, file_name)"
], version=self.schema["version"])
if changed:
changed_tables.append("json")
# Check schema tables
for table_name, table_settings in self.schema.get("tables", {}).items():
try:
indexes = table_settings.get("indexes", [])
version = table_settings.get("schema_changed", 0)
changed = cur.needTable(
table_name, table_settings["cols"],
indexes, version=version
)
if changed:
changed_tables.append(table_name)
except Exception as err:
self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err)))
raise DbTableError(err, table_name)
self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables))
if changed_tables:
self.db_keyvalues = {} # Refresh table version cache
return changed_tables
# Update json file to db
# Return: True if matched
def updateJson(self, file_path, file=None, cur=None):
if not file_path.startswith(self.db_dir):
return False # Not from the db dir: Skipping
relative_path = file_path[len(self.db_dir):] # File path relative to the db file
# Check if filename matches any of mappings in schema
matched_maps = []
for match, map_settings in self.schema["maps"].items():
try:
if SafeRe.match(match, relative_path):
matched_maps.append(map_settings)
except SafeRe.UnsafePatternError as err:
self.log.error(err)
# No match found for the file
if not matched_maps:
return False
# Load the json file
try:
if file is None: # Open the file if no file object was passed
file = open(file_path, "rb")
if file is False: # File deleted
data = {}
else:
if file_path.endswith("json.gz"):
file = helper.limitedGzipFile(fileobj=file)
if sys.version_info.major == 3 and sys.version_info.minor < 6:
data = json.loads(file.read().decode("utf8"))
else:
data = json.load(file)
except Exception as err:
self.log.debug("Json file %s load error: %s" % (file_path, err))
data = {}
# No cursor specified
if not cur:
cur = self.getSharedCursor()
cur.logging = False
# Row for current json file if required
if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]:
json_row = cur.getJsonRow(relative_path)
# Check matched mappings in schema
for dbmap in matched_maps:
# Insert non-relational key values
if dbmap.get("to_keyvalue"):
# Get current values
res = cur.execute("SELECT * FROM keyvalue WHERE json_id = ?", (json_row["json_id"],))
current_keyvalue = {}
current_keyvalue_id = {}
for row in res:
current_keyvalue[row["key"]] = row["value"]
current_keyvalue_id[row["key"]] = row["keyvalue_id"]
for key in dbmap["to_keyvalue"]:
if key not in current_keyvalue: # Keyvalue does not exist in the db yet
cur.execute(
"INSERT INTO keyvalue ?",
{"key": key, "value": data.get(key), "json_id": json_row["json_id"]}
)
elif data.get(key) != current_keyvalue[key]: # Keyvalue has a different value
cur.execute(
"UPDATE keyvalue SET value = ? WHERE keyvalue_id = ?",
(data.get(key), current_keyvalue_id[key])
)
# Insert data to json table for easier joins
if dbmap.get("to_json_table"):
directory, file_name = re.match("^(.*?)/*([^/]*)$", relative_path).groups()
data_json_row = dict(cur.getJsonRow(directory + "/" + dbmap.get("file_name", file_name)))
changed = False
for key in dbmap["to_json_table"]:
if data.get(key) != data_json_row.get(key):
changed = True
if changed:
# Add the custom col values
data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]})
cur.execute("INSERT OR REPLACE INTO json ?", data_json_row)
# Insert data to tables
for table_settings in dbmap.get("to_table", []):
if isinstance(table_settings, dict): # Custom settings
table_name = table_settings["table"] # Table name to insert datas
node = table_settings.get("node", table_name) # Node keyname in data json file
key_col = table_settings.get("key_col") # Map dict key as this col
val_col = table_settings.get("val_col") # Map dict value as this col
import_cols = table_settings.get("import_cols")
replaces = table_settings.get("replaces")
else: # Simple settings
table_name = table_settings
node = table_settings
key_col = None
val_col = None
import_cols = None
replaces = None
# Fill import cols from table cols
if not import_cols:
import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]])
cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],))
if node not in data:
continue
if key_col: # Map as dict
for key, val in data[node].items():
if val_col: # Single value
cur.execute(
"INSERT OR REPLACE INTO %s ?" % table_name,
{key_col: key, val_col: val, "json_id": json_row["json_id"]}
)
else: # Multi value
if type(val) is dict: # Single row
row = val
if import_cols:
row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols
row[key_col] = key
# Replace in value if necessary
if replaces:
for replace_key, replace in replaces.items():
if replace_key in row:
for replace_from, replace_to in replace.items():
row[replace_key] = row[replace_key].replace(replace_from, replace_to)
row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
elif type(val) is list: # Multi row
for row in val:
row[key_col] = key
row["json_id"] = json_row["json_id"]
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
else: # Map as list
for row in data[node]:
row["json_id"] = json_row["json_id"]
if import_cols:
row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols
cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row)
# Cleanup json row
if not data:
self.log.debug("Cleanup json row for %s" % file_path)
cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"])
return True
if __name__ == "__main__":
s = time.time()
console_log = logging.StreamHandler()
logging.getLogger('').setLevel(logging.DEBUG)
logging.getLogger('').addHandler(console_log)
console_log.setLevel(logging.DEBUG)
dbjson = Db(json.load(open("zerotalk.schema.json")), "data/users/zerotalk.db")
dbjson.collect_stats = True
dbjson.checkTables()
cur = dbjson.getCursor()
cur.logging = False
dbjson.updateJson("data/users/content.json", cur=cur)
for user_dir in os.listdir("data/users"):
if os.path.isdir("data/users/%s" % user_dir):
dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur)
# print ".",
cur.logging = True
print("Done in %.3fs" % (time.time() - s))
for query, stats in sorted(dbjson.query_stats.items()):
print("-", query, stats)


@ -1,246 +0,0 @@
import time
import re
from util import helper
# Special sqlite cursor
class DbCursor:
def __init__(self, db):
self.db = db
self.logging = False
def quoteValue(self, value):
if type(value) is int:
return str(value)
else:
return "'%s'" % value.replace("'", "''")
def parseQuery(self, query, params):
query_type = query.split(" ", 1)[0].upper()
if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params
if query_type in ("SELECT", "DELETE", "UPDATE"):
# Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? format
query_wheres = []
values = []
for key, value in params.items():
if type(value) is list:
if key.startswith("not__"):
field = key.replace("not__", "")
operator = "NOT IN"
else:
field = key
operator = "IN"
if len(value) > 100:
# Embed values in query to avoid "too many SQL variables" error
query_values = ",".join(map(helper.sqlquote, value))
else:
query_values = ",".join(["?"] * len(value))
values += value
query_wheres.append(
"%s %s (%s)" %
(field, operator, query_values)
)
else:
if key.startswith("not__"):
query_wheres.append(key.replace("not__", "") + " != ?")
elif key.endswith("__like"):
query_wheres.append(key.replace("__like", "") + " LIKE ?")
elif key.endswith(">"):
query_wheres.append(key.replace(">", "") + " > ?")
elif key.endswith("<"):
query_wheres.append(key.replace("<", "") + " < ?")
else:
query_wheres.append(key + " = ?")
values.append(value)
wheres = " AND ".join(query_wheres)
if wheres == "":
wheres = "1"
query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ?
params = values
else:
# Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
keys = ", ".join(params.keys())
values = ", ".join(['?' for key in params.keys()])
keysvalues = "(%s) VALUES (%s)" % (keys, values)
query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ?
params = tuple(params.values())
elif isinstance(params, dict) and ":" in query:
new_params = dict()
values = []
for key, value in params.items():
if type(value) is list:
for idx, val in enumerate(value):
new_params[key + "__" + str(idx)] = val
new_names = [":" + key + "__" + str(idx) for idx in range(len(value))]
query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query)
else:
new_params[key] = value
params = new_params
return query, params
def execute(self, query, params=None):
query = query.strip()
while self.db.progress_sleeping or self.db.commiting:
time.sleep(0.1)
self.db.last_query_time = time.time()
query, params = self.parseQuery(query, params)
cursor = self.db.getConn().cursor()
self.db.cursors.add(cursor)
if self.db.lock.locked():
self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock))
try:
s = time.time()
self.db.lock.acquire(True)
if query.upper().strip("; ") == "VACUUM":
self.db.commit("vacuum called")
if params:
res = cursor.execute(query, params)
else:
res = cursor.execute(query)
finally:
self.db.lock.release()
taken_query = time.time() - s
if self.logging or taken_query > 1:
if params: # Query has parameters
self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s))
else:
self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s))
# Log query stats
if self.db.collect_stats:
if query not in self.db.query_stats:
self.db.query_stats[query] = {"call": 0, "time": 0.0}
self.db.query_stats[query]["call"] += 1
self.db.query_stats[query]["time"] += time.time() - s
query_type = query.split(" ", 1)[0].upper()
is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"]
if not self.db.need_commit and is_update_query:
self.db.need_commit = True
if is_update_query:
return cursor
else:
return res
def executemany(self, query, params):
while self.db.progress_sleeping or self.db.commiting:
time.sleep(0.1)
self.db.last_query_time = time.time()
s = time.time()
cursor = self.db.getConn().cursor()
self.db.cursors.add(cursor)
try:
self.db.lock.acquire(True)
cursor.executemany(query, params)
finally:
self.db.lock.release()
taken_query = time.time() - s
if self.logging or taken_query > 0.1:
self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query))
self.db.need_commit = True
return cursor
# Creates or updates a database row without incrementing the rowid
def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}):
sql_sets = ["%s = :%s" % (key, key) for key in query_sets.keys()]
sql_wheres = ["%s = :%s" % (key, key) for key in query_wheres.keys()]
params = query_sets
params.update(query_wheres)
res = self.execute(
"UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)),
params
)
if res.rowcount == 0:
params.update(oninsert) # Add insert-only fields
self.execute("INSERT INTO %s ?" % table, params)
# Create new table
# Return: True on success
def createTable(self, table, cols):
# TODO: Check current structure
self.execute("DROP TABLE IF EXISTS %s" % table)
col_definitions = []
for col_name, col_type in cols:
col_definitions.append("%s %s" % (col_name, col_type))
self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
return True
# Create indexes on table
# Return: True on success
def createIndexes(self, table, indexes):
for index in indexes:
if not index.strip().upper().startswith("CREATE"):
self.db.log.error("Index command should start with CREATE: %s" % index)
continue
self.execute(index)
# Create table if not exist
# Return: True if updated
def needTable(self, table, cols, indexes=None, version=1):
current_version = self.db.getTableVersion(table)
if int(current_version) < int(version): # Table needs an update or does not exist
self.db.log.debug("Table %s outdated... version: %s, need: %s, rebuilding..." % (table, current_version, version))
self.createTable(table, cols)
if indexes:
self.createIndexes(table, indexes)
self.execute(
"INSERT OR REPLACE INTO keyvalue ?",
{"json_id": 0, "key": "table.%s.version" % table, "value": version}
)
return True
else: # Not changed
return False
# Get or create a row for json file
# Return: The database row
def getJsonRow(self, file_path):
directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
if self.db.schema["version"] == 1:
# One path field
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"path": file_path})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
row = res.fetchone()
elif self.db.schema["version"] == 2:
# Separate directory, file_name (easier join)
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
row = res.fetchone()
elif self.db.schema["version"] == 3:
# Separate site, directory, file_name (for merger sites)
site_address, directory = re.match("^([^/]*)/(.*)$", directory).groups()
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
row = res.fetchone()
if not row: # No row yet, create it
self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name})
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
row = res.fetchone()
else:
raise Exception("Dbschema version %s not supported" % self.db.schema.get("version"))
return row
def close(self):
pass


@ -1,46 +0,0 @@
import re
# Parse and modify sql queries
class DbQuery:
def __init__(self, query):
self.setQuery(query.strip())
# Split main parts of query
def parseParts(self, query):
parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query)
parts = [_f for _f in parts if _f] # Remove empty parts
parts = [s.strip() for s in parts] # Remove whitespace
return dict(list(zip(parts[0::2], parts[1::2])))
# Parse selected fields SELECT ... FROM
def parseFields(self, query_select):
fields = re.findall("([^,]+) AS ([^,]+)", query_select)
return {key: val.strip() for val, key in fields}
# Parse query conditions WHERE ...
def parseWheres(self, query_where):
if " AND " in query_where:
return query_where.split(" AND ")
elif query_where:
return [query_where]
else:
return []
# Set the query
def setQuery(self, query):
self.parts = self.parseParts(query)
self.fields = self.parseFields(self.parts["SELECT"])
self.wheres = self.parseWheres(self.parts.get("WHERE", ""))
# Convert query back to string
def __str__(self):
query_parts = []
for part_name in ["SELECT", "FROM", "WHERE", "ORDER BY", "LIMIT"]:
if part_name == "WHERE" and self.wheres:
query_parts.append("WHERE")
query_parts.append(" AND ".join(self.wheres))
elif part_name in self.parts:
query_parts.append(part_name)
query_parts.append(self.parts[part_name])
return "\n".join(query_parts)



@ -1,186 +0,0 @@
import sys
import os
import re
from Config import config
# Non-fatal exception
class Notify(Exception):
def __init__(self, message=None):
if message:
self.message = message
def __str__(self):
return self.message
# Gevent greenlet.kill accepts an Exception type
def createNotifyType(message):
return type("Notify", (Notify, ), {"message": message})
def formatExceptionMessage(err):
err_type = err.__class__.__name__
if err.args:
err_message = err.args[-1]
else:
err_message = err.__str__()
return "%s: %s" % (err_type, err_message)
python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")]
python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy
root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../")
root_dir = root_dir.replace("\\", "/")
def formatTraceback(items, limit=None, fold_builtin=True):
back = []
i = 0
prev_file_title = ""
is_prev_builtin = False
for path, line in items:
i += 1
is_last = i == len(items)
path = path.replace("\\", "/")
if path.startswith("src/gevent/"):
file_title = "<gevent>/" + path[len("src/gevent/"):]
is_builtin = True
is_skippable_builtin = False
elif path in ("<frozen importlib._bootstrap>", "<frozen importlib._bootstrap_external>"):
file_title = "(importlib)"
is_builtin = True
is_skippable_builtin = True
else:
is_skippable_builtin = False
for base in python_lib_dirs:
if path.startswith(base + "/"):
file_title = path[len(base + "/"):]
module_name, *tail = file_title.split("/")
if module_name.endswith(".py"):
module_name = module_name[:-3]
file_title = "/".join(["<%s>" % module_name] + tail)
is_builtin = True
break
else:
is_builtin = False
for base in (root_dir + "/src", root_dir + "/plugins", root_dir):
if path.startswith(base + "/"):
file_title = path[len(base + "/"):]
break
else:
# For unknown paths, do our best to hide absolute path
file_title = path
for needle in ("/zeronet/", "/core/"):
if needle in file_title.lower():
file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):]
# Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py
# E.g.: in 'Db/DbCursor.py' the directory part is unnecessary
if not file_title.startswith("/"):
prev_part = ""
for i, part in enumerate(file_title.split("/") + [""]):
if not part.startswith(prev_part):
break
prev_part = part
file_title = "/".join(file_title.split("/")[i - 1:])
if is_skippable_builtin and fold_builtin:
pass
elif is_builtin and is_prev_builtin and not is_last and fold_builtin:
if back[-1] != "...":
back.append("...")
else:
if file_title == prev_file_title:
back.append("%s" % line)
else:
back.append("%s line %s" % (file_title, line))
prev_file_title = file_title
is_prev_builtin = is_builtin
if limit and i >= limit:
back.append("...")
break
return back
def formatException(err=None, format="text"):
import traceback
if type(err) == Notify:
return err
elif type(err) == tuple and err and err[0] is not None: # Passed traceback info
exc_type, exc_obj, exc_tb = err
err = None
else: # No traceback info passed, get the latest
exc_type, exc_obj, exc_tb = sys.exc_info()
if not err:
if hasattr(err, "message"):
err = exc_obj.message
else:
err = exc_obj
tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)])
if format == "html":
return "%s: %s<br><small class='multiline'>%s</small>" % (repr(err), err, " > ".join(tb))
else:
return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb))
def formatStack(limit=None):
import inspect
tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit)
return " > ".join(tb)
# Test if gevent eventloop blocks
import logging
import gevent
import time
num_block = 0
def testBlock():
global num_block
logging.debug("Gevent block checker started")
last_time = time.time()
while 1:
time.sleep(1)
if time.time() - last_time > 1.1:
logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1))
num_block += 1
last_time = time.time()
gevent.spawn(testBlock)
if __name__ == "__main__":
try:
print(1 / 0)
except Exception as err:
print(type(err).__name__)
print("1/0 error: %s" % formatException(err))
def loadJson():
json.loads("Errr")
import json
try:
loadJson()
except Exception as err:
print(err)
print("Json load error: %s" % formatException(err))
try:
raise Notify("nothing...")
except Exception as err:
print("Notify: %s" % formatException(err))
loadJson()


@ -1,115 +0,0 @@
import sys
import logging
import signal
import importlib
import gevent
import gevent.hub
from Config import config
from . import Debug
last_error = None
def shutdown(reason="Unknown"):
logging.info("Shutting down (reason: %s)..." % reason)
import main
if "file_server" in dir(main):
try:
gevent.spawn(main.file_server.stop)
if "ui_server" in dir(main):
gevent.spawn(main.ui_server.stop)
except Exception as err:
print("Proper shutdown error: %s" % err)
sys.exit(0)
else:
sys.exit(0)
# Store last error, ignore notify, allow manual error logging
def handleError(*args, **kwargs):
global last_error
if not args: # Called manually
args = sys.exc_info()
silent = True
else:
silent = False
if args[0].__name__ != "Notify":
last_error = args
if args[0].__name__ == "KeyboardInterrupt":
shutdown("Keyboard interrupt")
elif not silent and args[0].__name__ != "Notify":
logging.exception("Unhandled exception")
if "greenlet.py" not in args[2].tb_frame.f_code.co_filename: # Don't display error twice
sys.__excepthook__(*args, **kwargs)
# Ignore notify errors
def handleErrorNotify(*args, **kwargs):
err = args[0]
if err.__name__ == "KeyboardInterrupt":
shutdown("Keyboard interrupt")
elif err.__name__ != "Notify":
logging.error("Unhandled exception: %s" % Debug.formatException(args))
sys.__excepthook__(*args, **kwargs)
if config.debug: # Keep last error for /Debug
sys.excepthook = handleError
else:
sys.excepthook = handleErrorNotify
# Override default error handler to allow silent killing / custom logging
if "handle_error" in dir(gevent.hub.Hub):
gevent.hub.Hub._original_handle_error = gevent.hub.Hub.handle_error
else:
logging.debug("gevent.hub.Hub.handle_error not found using old gevent hooks")
OriginalGreenlet = gevent.Greenlet
class ErrorhookedGreenlet(OriginalGreenlet):
def _report_error(self, exc_info):
sys.excepthook(exc_info[0], exc_info[1], exc_info[2])
gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet
importlib.reload(gevent)
def handleGreenletError(context, type, value, tb):
if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool":
# Exceptions in ThreadPool will be handled in the main Thread
return None
if isinstance(value, str):
# Cython can raise errors where the value is a plain string
# e.g., AttributeError, "_semaphore.Semaphore has no attr", <traceback>
value = type(value)
if not issubclass(type, gevent.get_hub().NOT_ERROR):
sys.excepthook(type, value, tb)
gevent.get_hub().handle_error = handleGreenletError
try:
signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM"))
except Exception as err:
logging.debug("Error setting up SIGTERM watcher: %s" % err)
if __name__ == "__main__":
import time
from gevent import monkey
monkey.patch_all(thread=False, ssl=False)
from . import Debug
def sleeper(num):
print("started", num)
time.sleep(3)
raise Exception("Error")
print("stopped", num)
thread1 = gevent.spawn(sleeper, 1)
thread2 = gevent.spawn(sleeper, 2)
time.sleep(1)
print("killing...")
thread1.kill(exception=Debug.Notify("Worker stopped"))
#thread2.throw(Debug.Notify("Throw"))
print("killed")
gevent.joinall([thread1, thread2])


@ -1,24 +0,0 @@
import time
import logging
import gevent.lock
from Debug import Debug
class DebugLock:
def __init__(self, log_after=0.01, name="Lock"):
self.name = name
self.log_after = log_after
self.lock = gevent.lock.Semaphore(1)
self.release = self.lock.release
def acquire(self, *args, **kwargs):
s = time.time()
res = self.lock.acquire(*args, **kwargs)
time_taken = time.time() - s
if time_taken >= self.log_after:
logging.debug("%s: Waited %.3fs after called by %s" %
(self.name, time_taken, Debug.formatStack())
)
return res
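
A hypothetical usage of the lock above: it behaves like a gevent semaphore but logs the caller's stack whenever acquiring took longer than log_after (the package path is an assumption based on the deleted src layout):

from Debug.DebugLock import DebugLock  # assumed package path

lock = DebugLock(log_after=0.05, name="DbLock")
lock.acquire(True)
try:
    pass  # critical section; callers that waited >50ms get logged with their stack
finally:
    lock.release()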


@ -1,135 +0,0 @@
import os
import subprocess
import re
import logging
import time
import functools
from Config import config
from util import helper
# Find files with extension in path
def findfiles(path, find_ext):
def sorter(f1, f2):
f1 = f1[0].replace(path, "")
f2 = f2[0].replace(path, "")
if f1 == "":
return 1
elif f2 == "":
return -1
else:
return helper.cmp(f1.lower(), f2.lower())
for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)):
for file in sorted(files):
file_path = root + "/" + file
file_ext = file.split(".")[-1]
if file_ext in find_ext and not file.startswith("all."):
yield file_path.replace("\\", "/")
# Try to find coffeescript compiler in path
def findCoffeescriptCompiler():
coffeescript_compiler = None
try:
import distutils.spawn
coffeescript_compiler = helper.shellquote(distutils.spawn.find_executable("coffee")) + " --no-header -p"
except:
pass
if coffeescript_compiler:
return coffeescript_compiler
else:
return False
# Generates all.js (merge *.js + compile coffeescript) and all.css (merge *.css + vendor-prefix features)
def merge(merged_path):
merged_path = merged_path.replace("\\", "/")
merge_dir = os.path.dirname(merged_path)
s = time.time()
ext = merged_path.split(".")[-1]
if ext == "js": # If merging .js find .coffee too
find_ext = ["js", "coffee"]
else:
find_ext = [ext]
# If the merged file exists, check the other files' modification dates
if os.path.isfile(merged_path):
merged_mtime = os.path.getmtime(merged_path)
else:
merged_mtime = 0
changed = {}
for file_path in findfiles(merge_dir, find_ext):
if os.path.getmtime(file_path) > merged_mtime + 1:
changed[file_path] = True
if not changed:
return # Assets not changed, nothing to do
old_parts = {}
if os.path.isfile(merged_path): # Find old parts to avoid unnecessary recompile
merged_old = open(merged_path, "rb").read()
for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL):
old_parts[match[1].decode()] = match[2].strip(b"\n\r")
logging.debug("Merging %s (changed: %s, old parts: %s)" % (merged_path, changed, len(old_parts)))
# Merge files
parts = []
s_total = time.time()
for file_path in findfiles(merge_dir, find_ext):
file_relative_path = file_path.replace(merge_dir + "/", "")
parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8"))
if file_path.endswith(".coffee"): # Compile coffee script
if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or it hasn't been compiled before
if config.coffeescript_compiler is None:
config.coffeescript_compiler = findCoffeescriptCompiler()
if not config.coffeescript_compiler:
logging.error("No coffeescript compiler defined, skipping compiling %s" % merged_path)
return False # No coffeescript compiler, skip this file
# Replace / with os separators and escape it
file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep))
if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file
command = config.coffeescript_compiler.replace("%s", file_path_escaped)
else: # Put coffeescript file to end
command = config.coffeescript_compiler + " " + file_path_escaped
# Start compiling
s = time.time()
compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
out = compiler.stdout.read()
compiler.wait()
logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s))
# Check errors
if out and out.startswith(b"("): # No error found
parts.append(out)
else: # Put error message in place of source code
error = out
logging.error("%s Compile error: %s" % (file_relative_path, error))
error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n")
parts.append(
b"alert('%s compile error: %s');" %
(file_relative_path.encode(), error_escaped)
)
else: # Not changed, reuse the old part
parts.append(old_parts[file_relative_path])
else: # Add to parts
parts.append(open(file_path, "rb").read())
merged = b"\n".join(parts)
if ext == "css": # Vendor prefix css
from lib.cssvendor import cssvendor
merged = cssvendor.prefix(merged)
merged = merged.replace(b"\r", b"")
open(merged_path, "wb").write(merged)
logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total))
if __name__ == "__main__":
logging.getLogger().setLevel(logging.DEBUG)
os.chdir("..")
config.coffeescript_compiler = r'type "%s" | tools\coffee-node\bin\node.exe tools\coffee-node\bin\coffee --no-header -s -p'
merge("data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/js/all.js")

View File

@ -1,69 +0,0 @@
import logging
import time
import os
from Config import config
if config.debug and config.action == "main":
try:
import watchdog
import watchdog.observers
import watchdog.events
logging.debug("Watchdog fs listener detected, source code autoreload enabled")
enabled = True
except Exception as err:
logging.debug("Watchdog fs listener could not be loaded: %s" % err)
enabled = False
else:
enabled = False
class DebugReloader:
def __init__(self, paths=None):
if not paths:
paths = ["src", "plugins", config.data_dir + "/__plugins__"]
self.log = logging.getLogger("DebugReloader")
self.last_changed = 0
self.callbacks = []
if enabled:
self.observer = watchdog.observers.Observer()
event_handler = watchdog.events.FileSystemEventHandler()
event_handler.on_modified = event_handler.on_deleted = self.onChanged
event_handler.on_created = event_handler.on_moved = self.onChanged
for path in paths:
if not os.path.isdir(path):
continue
self.log.debug("Adding autoreload: %s" % path)
self.observer.schedule(event_handler, path, recursive=True)
self.observer.start()
def addCallback(self, f):
self.callbacks.append(f)
def onChanged(self, evt):
path = evt.src_path
ext = path.rsplit(".", 1)[-1]
if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_changed < 1.0:
return False
self.last_changed = time.time()
if os.path.isfile(path):
time_modified = os.path.getmtime(path)
else:
time_modified = 0
self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified))
if time.time() - time_modified > 5: # Probably it's just an attribute change, ignore it
return False
time.sleep(0.1) # Wait for lock release
for callback in self.callbacks:
try:
callback()
except Exception as err:
self.log.exception(err)
def stop(self):
if enabled:
self.observer.stop()
self.log.debug("Stopped autoreload observer")
watcher = DebugReloader()
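Typical wiring, as FileServer.start() and PluginManager do later in this commit (callback body hypothetical):

from Debug import DebugReloader

def onSourceChanged():
    print("Source changed, reloading...")

DebugReloader.watcher.addCallback(onSourceChanged)  # Fired at most once per second on .py/.json changes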

View File

@ -1,448 +0,0 @@
# Included modules
import os
import time
import json
import collections
import itertools
# Third party modules
import gevent
from Debug import Debug
from Config import config
from util import RateLimit
from util import Msgpack
from util import helper
from Plugin import PluginManager
from contextlib import closing
FILE_BUFF = 1024 * 512
class RequestError(Exception):
pass
# Incoming requests
@PluginManager.acceptPlugins
class FileRequest(object):
__slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
def __init__(self, server, connection):
self.server = server
self.connection = connection
self.req_id = None
self.sites = self.server.sites
self.log = server.log
self.responded = False # Responded to the request
def send(self, msg, streaming=False):
if not self.connection.closed:
self.connection.send(msg, streaming)
def sendRawfile(self, file, read_bytes):
if not self.connection.closed:
self.connection.sendRawfile(file, read_bytes)
def response(self, msg, streaming=False):
if self.responded:
if config.verbose:
self.log.debug("Req id %s already responded" % self.req_id)
return
if not isinstance(msg, dict): # If msg not a dict create a {"body": msg}
msg = {"body": msg}
msg["cmd"] = "response"
msg["to"] = self.req_id
self.responded = True
self.send(msg, streaming=streaming)
# Route file requests
def route(self, cmd, req_id, params):
self.req_id = req_id
# Don't allow sites other than the locked ones
if "site" in params and self.connection.target_onion:
valid_sites = self.connection.getValidSites()
if params["site"] not in valid_sites and valid_sites != ["global"]:
self.response({"error": "Invalid site"})
self.connection.log(
"Site lock violation: %s not in %s, target onion: %s" %
(params["site"], valid_sites, self.connection.target_onion)
)
self.connection.badAction(5)
return False
if cmd == "update":
event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
# If called more than once within 15 sec only keep the last update
RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params)
else:
func_name = "action" + cmd[0].upper() + cmd[1:]
func = getattr(self, func_name, None)
if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions
if self.connection.cpu_time > 0.5:
self.log.debug(
"Delay %s %s, cpu_time used by connection: %.3fs" %
(self.connection.ip, cmd, self.connection.cpu_time)
)
time.sleep(self.connection.cpu_time)
if self.connection.cpu_time > 5:
self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time)
s = time.time()
if func:
func(params)
else:
self.actionUnknown(cmd, params)
if cmd not in ["getFile", "streamFile"]:
taken = time.time() - s
taken_sent = self.connection.last_sent_time - self.connection.last_send_time
self.connection.cpu_time += taken - taken_sent
# Update a site file request
def actionUpdate(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(1)
self.connection.badAction(5)
return False
inner_path = params.get("inner_path", "")
current_content_modified = site.content_manager.contents.get(inner_path, {}).get("modified", 0)
body = params["body"]
if not inner_path.endswith("content.json"):
self.response({"error": "Only content.json update allowed"})
self.connection.badAction(5)
return
should_validate_content = True
if "modified" in params and params["modified"] <= current_content_modified:
should_validate_content = False
valid = None # Same or earlier content as we have
elif not body: # No body sent, we have to download it first
site.log.debug("Missing body from update for file %s, downloading ..." % inner_path)
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer
try:
body = peer.getFile(site.address, inner_path).read()
except Exception as err:
site.log.debug("Can't download updated file %s: %s" % (inner_path, err))
self.response({"error": "File invalid update: Can't download updaed file"})
self.connection.badAction(5)
return
if should_validate_content:
try:
content = json.loads(body.decode())
except Exception as err:
site.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err))
self.response({"error": "File invalid JSON"})
self.connection.badAction(5)
return
file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"])
if self.server.files_parsing.get(file_uri): # Check if we're already working on it
valid = None # Same file
else:
try:
valid = site.content_manager.verifyFile(inner_path, content)
except Exception as err:
site.log.debug("Update for %s is invalid: %s" % (inner_path, err))
error = err
valid = False
if valid is True: # Valid and changed
site.log.info("Update for %s looks valid, saving..." % inner_path)
self.server.files_parsing[file_uri] = True
site.storage.write(inner_path, body)
del params["body"]
site.onFileDone(inner_path) # Trigger filedone
if inner_path.endswith("content.json"): # Download every changed file from peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer
# On complete publish to other peers
diffs = params.get("diffs", {})
site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=3), "publish_%s" % inner_path)
# Load new content file and download changed files in new thread
def downloader():
site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {}))
del self.server.files_parsing[file_uri]
gevent.spawn(downloader)
else:
del self.server.files_parsing[file_uri]
self.response({"ok": "Thanks, file %s updated!" % inner_path})
self.connection.goodAction()
elif valid is None: # Not changed
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update old") # Add or get peer
if peer:
if not peer.connection:
peer.connect(self.connection) # Assign current connection to peer
if inner_path in site.content_manager.contents:
peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"]
if config.verbose:
site.log.debug(
"Same version, adding new peer for locked files: %s, tasks: %s" %
(peer.key, len(site.worker_manager.tasks))
)
for task in site.worker_manager.tasks: # Add the new peer to every ongoing task
if task["peers"] and not task["optional_hash_id"]:
# Download the file from this peer too if it's peer-locked
site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
self.response({"ok": "File not changed"})
self.connection.badAction()
else: # Invalid signature or SHA hash
self.response({"error": "File %s invalid: %s" % (inner_path, error)})
self.connection.badAction(5)
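# Illustrative request/response shape for the update handler above (values
# hypothetical): the peer sends
#   {"cmd": "update", "req_id": 1, "params": {"site": "1SiteAddr...",
#    "inner_path": "content.json", "body": b"{...}", "modified": 1693600000,
#    "diffs": {}}}
# and gets back {"cmd": "response", "to": 1, "ok": "Thanks, file ... updated!"}
# or {"cmd": "response", "to": 1, "error": "..."}.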
def isReadable(self, site, inner_path, file, pos):
return True
# Send file content request
def handleGetFile(self, params, streaming=False):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
try:
file_path = site.storage.getPath(params["inner_path"])
if streaming:
file_obj = site.storage.open(params["inner_path"])
else:
file_obj = Msgpack.FilePart(file_path, "rb")
with file_obj as file:
file.seek(params["location"])
read_bytes = params.get("read_bytes", FILE_BUFF)
file_size = os.fstat(file.fileno()).st_size
if file_size > read_bytes: # Check if file is readable at current position (for big files)
if not self.isReadable(site, params["inner_path"], file, params["location"]):
raise RequestError("File not readable at position: %s" % params["location"])
else:
if params.get("file_size") and params["file_size"] != file_size:
self.connection.badAction(2)
raise RequestError("File size does not match: %sB != %sB" % (params["file_size"], file_size))
if not streaming:
file.read_bytes = read_bytes
if params["location"] > file_size:
self.connection.badAction(5)
raise RequestError("Bad file location")
if streaming:
back = {
"size": file_size,
"location": min(file.tell() + read_bytes, file_size),
"stream_bytes": min(read_bytes, file_size - params["location"])
}
self.response(back)
self.sendRawfile(file, read_bytes=read_bytes)
else:
back = {
"body": file,
"size": file_size,
"location": min(file.tell() + file.read_bytes, file_size)
}
self.response(back, streaming=True)
bytes_sent = min(read_bytes, file_size - params["location"]) # Number of bytes we're going to send
site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + bytes_sent
if config.debug_socket:
self.log.debug("File %s at position %s sent %s bytes" % (file_path, params["location"], bytes_sent))
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request")
if connected_peer: # Just added
connected_peer.connect(self.connection) # Assign current connection to peer
return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]}
except RequestError as err:
self.log.debug("GetFile %s %s %s request error: %s" % (self.connection, params["site"], params["inner_path"], Debug.formatException(err)))
self.response({"error": "File read error: %s" % err})
except OSError as err:
if config.verbose:
self.log.debug("GetFile read error: %s" % Debug.formatException(err))
self.response({"error": "File read error"})
return False
except Exception as err:
self.log.error("GetFile exception: %s" % Debug.formatException(err))
self.response({"error": "File read exception"})
return False
def actionGetFile(self, params):
return self.handleGetFile(params)
def actionStreamFile(self, params):
return self.handleGetFile(params, streaming=True)
# Peer exchange request
def actionPex(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
got_peer_keys = []
added = 0
# Add requester peer to site
connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request")
if connected_peer: # It was not registered before
added += 1
connected_peer.connect(self.connection) # Assign current connection to peer
# Add sent peers to site
for packed_address in itertools.chain(params.get("peers", []), params.get("peers_ipv6", [])):
address = helper.unpackAddress(packed_address)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address, source="pex"):
added += 1
# Add sent onion peers to site
for packed_address in params.get("peers_onion", []):
address = helper.unpackOnionAddress(packed_address)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address, source="pex"):
added += 1
# Send back peers that are not in the sent list and are connectable (not port 0)
packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], ignore=got_peer_keys, allow_private=False))
if added:
site.worker_manager.onPeers()
if config.verbose:
self.log.debug(
"Added %s peers to %s using pex, sending back %s" %
(added, site, {key: len(val) for key, val in packed_peers.items()})
)
back = {
"peers": packed_peers["ipv4"],
"peers_ipv6": packed_peers["ipv6"],
"peers_onion": packed_peers["onion"]
}
self.response(back)
# Get modified content.json files since
def actionListModified(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
modified_files = site.content_manager.listModified(params["since"])
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request")
if connected_peer: # Just added
connected_peer.connect(self.connection) # Assign current connection to peer
self.response({"modified_files": modified_files})
def actionGetHashfield(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
# Add peer to site if not added before
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="request")
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
peer.time_my_hashfield_sent = time.time() # Don't send again if not changed
self.response({"hashfield_raw": site.content_manager.hashfield.tobytes()})
def findHashIds(self, site, hash_ids, limit=100):
back = collections.defaultdict(lambda: collections.defaultdict(list))
found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)
for hash_id, peers in found.items():
for peer in peers:
ip_type = helper.getIpType(peer.ip)
if len(back[ip_type][hash_id]) < 20:
back[ip_type][hash_id].append(peer.packMyAddress())
return back
def actionFindHashIds(self, params):
site = self.sites.get(params["site"])
s = time.time()
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
time.sleep(0.1)
back = self.findHashIds(site, params["hash_ids"], limit=10)
else:
back = self.findHashIds(site, params["hash_ids"])
RateLimit.called(event_key)
my_hashes = []
my_hashfield_set = set(site.content_manager.hashfield)
for hash_id in params["hash_ids"]:
if hash_id in my_hashfield_set:
my_hashes.append(hash_id)
if config.verbose:
self.log.debug(
"Found: %s for %s hashids in %.3fs" %
({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s)
)
self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes})
def actionSetHashfield(self, params):
site = self.sites.get(params["site"])
if not site or not site.isServing(): # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
# Add or get peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection, source="request")
if not peer.connection:
peer.connect(self.connection)
peer.hashfield.replaceFromBytes(params["hashfield_raw"])
self.response({"ok": "Updated"})
# Send a simple Pong! answer
def actionPing(self, params):
self.response(b"Pong!")
# Check requested port of the other peer
def actionCheckport(self, params):
if helper.getIpType(self.connection.ip) == "ipv6":
sock_address = (self.connection.ip, params["port"], 0, 0)
else:
sock_address = (self.connection.ip, params["port"])
with closing(helper.createSocket(self.connection.ip)) as sock:
sock.settimeout(5)
if sock.connect_ex(sock_address) == 0:
self.response({"status": "open", "ip_external": self.connection.ip})
else:
self.response({"status": "closed", "ip_external": self.connection.ip})
# Unknown command
def actionUnknown(self, cmd, params):
self.response({"error": "Unknown command: %s" % cmd})
self.connection.badAction(5)
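A client-side sketch against these handlers (assuming an established Connection object conn with the request() helper used by Peer below; site address hypothetical):

res = conn.request("ping", {})
assert res["body"] == b"Pong!"

# First 512 KB (FILE_BUFF) of a file, starting at offset 0:
res = conn.request("getFile", {
    "site": "1SiteAddr...",
    "inner_path": "content.json",
    "location": 0,
})
body, size = res["body"], res["size"]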

View File

@ -1,403 +0,0 @@
import logging
import time
import random
import socket
import sys
import gevent
import gevent.pool
from gevent.server import StreamServer
import util
from util import helper
from Config import config
from .FileRequest import FileRequest
from Peer import PeerPortchecker
from Site import SiteManager
from Connection import ConnectionServer
from Plugin import PluginManager
from Debug import Debug
@PluginManager.acceptPlugins
class FileServer(ConnectionServer):
def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port, ip_type=config.fileserver_ip_type):
self.site_manager = SiteManager.site_manager
self.portchecker = PeerPortchecker.PeerPortchecker(self)
self.log = logging.getLogger("FileServer")
self.ip_type = ip_type
self.ip_external_list = []
self.supported_ip_types = ["ipv4"] # Outgoing ip_type support
if helper.getIpType(ip) == "ipv6" or self.isIpv6Supported():
self.supported_ip_types.append("ipv6")
if ip_type == "ipv6" or (ip_type == "dual" and "ipv6" in self.supported_ip_types):
ip = ip.replace("*", "::")
else:
ip = ip.replace("*", "0.0.0.0")
if config.tor == "always":
port = config.tor_hs_port
config.fileserver_port = port
elif port == 0: # Use random port
port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-")))
port = self.getRandomPort(ip, port_range_from, port_range_to)
config.fileserver_port = port
if not port:
raise Exception("Can't find bindable port")
if not config.tor == "always":
config.saveValue("fileserver_port", port) # Save random port value for next restart
config.arguments.fileserver_port = port
ConnectionServer.__init__(self, ip, port, self.handleRequest)
self.log.debug("Supported IP types: %s" % self.supported_ip_types)
if ip_type == "dual" and ip == "::":
# Also bind to an IPv4 address in dual mode
try:
self.log.debug("Binding proxy to %s:%s" % ("::", self.port))
self.stream_server_proxy = StreamServer(
("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100
)
except Exception as err:
self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err))
self.port_opened = {}
self.sites = self.site_manager.sites
self.last_request = time.time()
self.files_parsing = {}
self.ui_server = None
def getRandomPort(self, ip, port_range_from, port_range_to):
self.log.info("Getting random port in range %s-%s..." % (port_range_from, port_range_to))
tried = []
for bind_retry in range(100):
port = random.randint(port_range_from, port_range_to)
if port in tried:
continue
tried.append(port)
sock = helper.createSocket(ip)
try:
sock.bind((ip, port))
success = True
except Exception as err:
self.log.warning("Error binding to port %s: %s" % (port, err))
success = False
sock.close()
if success:
self.log.info("Found unused random port: %s" % port)
return port
else:
time.sleep(0.1)
return False
def isIpv6Supported(self):
if config.tor == "always":
return True
# Test if we can connect to ipv6 address
ipv6_testip = "fcec:ae97:8902:d810:6c92:ec67:efb2:3ec5"
try:
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock.connect((ipv6_testip, 80))
local_ipv6 = sock.getsockname()[0]
if local_ipv6 == "::1":
self.log.debug("IPv6 not supported, no local IPv6 address")
return False
else:
self.log.debug("IPv6 supported on IP %s" % local_ipv6)
return True
except socket.error as err:
self.log.warning("IPv6 not supported: %s" % err)
return False
except Exception as err:
self.log.error("IPv6 check error: %s" % err)
return False
def listenProxy(self):
try:
self.stream_server_proxy.serve_forever()
except Exception as err:
if err.errno == 98: # Address already in use error
self.log.debug("StreamServer proxy listen error: %s" % err)
else:
self.log.info("StreamServer proxy listen error: %s" % err)
# Handle request to fileserver
def handleRequest(self, connection, message):
if config.verbose:
if "params" in message:
self.log.debug(
"FileRequest: %s %s %s %s" %
(str(connection), message["cmd"], message["params"].get("site"), message["params"].get("inner_path"))
)
else:
self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"]))
req = FileRequest(self, connection)
req.route(message["cmd"], message.get("req_id"), message.get("params"))
if not self.has_internet and not connection.is_private_ip:
self.has_internet = True
self.onInternetOnline()
def onInternetOnline(self):
self.log.info("Internet online")
gevent.spawn(self.checkSites, check_files=False, force_port_check=True)
# Reload the FileRequest class to prevent restarts in debug mode
def reload(self):
global FileRequest
import imp
FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest
def portCheck(self):
if config.offline:
self.log.info("Offline mode: port check disabled")
res = {"ipv4": None, "ipv6": None}
self.port_opened = res
return res
if config.ip_external:
for ip_external in config.ip_external:
SiteManager.peer_blacklist.append((ip_external, self.port)) # Add myself to peer blacklist
ip_external_types = set([helper.getIpType(ip) for ip in config.ip_external])
res = {
"ipv4": "ipv4" in ip_external_types,
"ipv6": "ipv6" in ip_external_types
}
self.ip_external_list = config.ip_external
self.port_opened.update(res)
self.log.info("Server port opened based on configuration ipv4: %s, ipv6: %s" % (res["ipv4"], res["ipv6"]))
return res
self.port_opened = {}
if self.ui_server:
self.ui_server.updateWebsocket()
if "ipv6" in self.supported_ip_types:
res_ipv6_thread = gevent.spawn(self.portchecker.portCheck, self.port, "ipv6")
else:
res_ipv6_thread = None
res_ipv4 = self.portchecker.portCheck(self.port, "ipv4")
if not res_ipv4["opened"] and config.tor != "always":
if self.portchecker.portOpen(self.port):
res_ipv4 = self.portchecker.portCheck(self.port, "ipv4")
if res_ipv6_thread is None:
res_ipv6 = {"ip": None, "opened": None}
else:
res_ipv6 = res_ipv6_thread.get()
if res_ipv6["opened"] and not helper.getIpType(res_ipv6["ip"]) == "ipv6":
self.log.info("Invalid IPv6 address from port check: %s" % res_ipv6["ip"])
res_ipv6["opened"] = False
self.ip_external_list = []
for res_ip in [res_ipv4, res_ipv6]:
if res_ip["ip"] and res_ip["ip"] not in self.ip_external_list:
self.ip_external_list.append(res_ip["ip"])
SiteManager.peer_blacklist.append((res_ip["ip"], self.port))
self.log.info("Server port opened ipv4: %s, ipv6: %s" % (res_ipv4["opened"], res_ipv6["opened"]))
res = {"ipv4": res_ipv4["opened"], "ipv6": res_ipv6["opened"]}
# Add external IPs from local interfaces
interface_ips = helper.getInterfaceIps("ipv4")
if "ipv6" in self.supported_ip_types:
interface_ips += helper.getInterfaceIps("ipv6")
for ip in interface_ips:
if not helper.isPrivateIp(ip) and ip not in self.ip_external_list:
self.ip_external_list.append(ip)
res[helper.getIpType(ip)] = True # We have opened port if we have external ip
SiteManager.peer_blacklist.append((ip, self.port))
self.log.debug("External ip found on interfaces: %s" % ip)
self.port_opened.update(res)
if self.ui_server:
self.ui_server.updateWebsocket()
return res
# Check site file integrity
def checkSite(self, site, check_files=False):
if site.isServing():
site.announce(mode="startup") # Announce site to tracker
site.update(check_files=check_files) # Update site's content.json and download changed files
site.sendMyHashfield()
site.updateHashfield()
# Check sites integrity
@util.Noparallel()
def checkSites(self, check_files=False, force_port_check=False):
self.log.debug("Checking sites...")
s = time.time()
sites_checking = False
if not self.port_opened or force_port_check: # Test and open port if not tested yet
if len(self.sites) <= 2: # Don't wait for port opening on first startup
sites_checking = True
for address, site in list(self.sites.items()):
gevent.spawn(self.checkSite, site, check_files)
self.portCheck()
if not self.port_opened["ipv4"]:
self.tor_manager.startOnions()
if not sites_checking:
check_pool = gevent.pool.Pool(5)
# Check sites integrity
for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True):
if not site.isServing():
continue
check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread
time.sleep(2)
if site.settings.get("modified", 0) < time.time() - 60 * 60 * 24: # Not so active site, wait some sec to finish
check_thread.join(timeout=5)
self.log.debug("Checksites done in %.3fs" % (time.time() - s))
def cleanupSites(self):
import gc
startup = True
time.sleep(5 * 60) # Sites already cleaned up on startup
peers_protected = set([])
while 1:
# Sites health care every 20 min
self.log.debug(
"Running site cleanup, connections: %s, internet: %s, protected peers: %s" %
(len(self.connections), self.has_internet, len(peers_protected))
)
for address, site in list(self.sites.items()):
if not site.isServing():
continue
if not startup:
site.cleanupPeers(peers_protected)
time.sleep(1) # Prevent too quick request
peers_protected = set([])
for address, site in list(self.sites.items()):
if not site.isServing():
continue
if site.peers:
with gevent.Timeout(10, exception=False):
site.announcer.announcePex()
# Last check modification failed
if site.content_updated is False:
site.update()
elif site.bad_files:
site.retryBadFiles()
if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7:
# Keep active connections if the site has been modified within 7 days
connected_num = site.needConnections(check_site_on_reconnect=True)
if connected_num < config.connected_limit: # This site has a small number of peers, protect them from closing
peers_protected.update([peer.key for peer in site.getConnectedPeers()])
time.sleep(1) # Prevent too quick request
site = None
gc.collect() # Implicit garbage collection
startup = False
time.sleep(60 * 20)
def announceSite(self, site):
site.announce(mode="update", pex=False)
active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60
if site.settings["own"] or active_site:
# Check connections more frequently on own and active sites to speed-up first connections
site.needConnections(check_site_on_reconnect=True)
site.sendMyHashfield(3)
site.updateHashfield(3)
# Announce sites every 20 min
def announceSites(self):
time.sleep(5 * 60) # Sites already announced on startup
while 1:
config.loadTrackersFile()
s = time.time()
for address, site in list(self.sites.items()):
if not site.isServing():
continue
gevent.spawn(self.announceSite, site).join(timeout=10)
time.sleep(1)
taken = time.time() - s
# Query all trackers one-by-one, evenly distributed over 20 minutes
sleep = max(0, 60 * 20 / len(config.trackers) - taken)
self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep))
time.sleep(sleep)
# Detect if the computer has woken up from sleep
def wakeupWatcher(self):
last_time = time.time()
last_my_ips = socket.gethostbyname_ex('')[2]
while 1:
time.sleep(30)
is_time_changed = time.time() - max(self.last_request, last_time) > 60 * 3
if is_time_changed:
# If more than 3 minutes passed, the computer was in sleep mode
self.log.info(
"Wakeup detected: time warp from %0.f to %0.f (%0.f sleep seconds), acting like startup..." %
(last_time, time.time(), time.time() - last_time)
)
my_ips = socket.gethostbyname_ex('')[2]
is_ip_changed = my_ips != last_my_ips
if is_ip_changed:
self.log.info("IP change detected from %s to %s" % (last_my_ips, my_ips))
if is_time_changed or is_ip_changed:
self.checkSites(check_files=False, force_port_check=True)
last_time = time.time()
last_my_ips = my_ips
# Bind and start serving sites
def start(self, check_sites=True):
if self.stopping:
return False
ConnectionServer.start(self)
try:
self.stream_server.start()
except Exception as err:
self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err))
self.sites = self.site_manager.list()
if config.debug:
# Auto reload FileRequest on change
from Debug import DebugReloader
DebugReloader.watcher.addCallback(self.reload)
if check_sites: # Open port, Update sites, Check files integrity
gevent.spawn(self.checkSites)
thread_announce_sites = gevent.spawn(self.announceSites)
thread_cleanup_sites = gevent.spawn(self.cleanupSites)
thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher)
ConnectionServer.listen(self)
self.log.debug("Stopped.")
def stop(self):
if self.running and self.portchecker.upnp_port_opened:
self.log.debug('Closing port %d' % self.port)
try:
self.portchecker.portClose(self.port)
self.log.info('Closed port via upnp.')
except Exception as err:
self.log.info("Failed at attempt to use upnp to close port: %s" % err)
return ConnectionServer.stop(self)
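A minimal startup sketch (simplified; the real wiring lives in main.py):

from File import FileServer

file_server = FileServer(ip="*", port=0)  # port 0: pick a random bindable port
file_server.start(check_sites=True)       # Port check, site updates, then listen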

View File

@ -1,2 +0,0 @@
from .FileServer import FileServer
from .FileRequest import FileRequest

View File

@ -1,410 +0,0 @@
import logging
import time
import sys
import itertools
import collections
import gevent
import io
from Debug import Debug
from Config import config
from util import helper
from .PeerHashfield import PeerHashfield
from Plugin import PluginManager
if config.use_tempfiles:
import tempfile
# Communicate with remote peers
@PluginManager.acceptPlugins
class Peer(object):
__slots__ = (
"ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield",
"time_added", "has_hashfield", "is_tracker_connection", "time_my_hashfield_sent", "last_ping", "reputation",
"last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time"
)
def __init__(self, ip, port, site=None, connection_server=None):
self.ip = ip
self.port = port
self.site = site
self.key = "%s:%s" % (ip, port)
self.connection = None
self.connection_server = connection_server
self.has_hashfield = False # Lazy hashfield object not created yet
self.time_hashfield = None # Last time the peer's hashfield was downloaded
self.time_my_hashfield_sent = None # Last time my hashfield sent to peer
self.time_found = time.time() # Time of last found in the torrent tracker
self.time_response = None # Time of last successful response from peer
self.time_added = time.time()
self.last_ping = None # Last response time for ping
self.is_tracker_connection = False # Tracker connection instead of normal peer
self.reputation = 0 # More likely to connect if larger
self.last_content_json_update = 0.0 # Modify date of last received content.json
self.connection_error = 0 # Number of connection errors in a row
self.hash_failed = 0 # Number of bad files from peer
self.download_bytes = 0 # Bytes downloaded
self.download_time = 0 # Time spent to download
def __getattr__(self, key):
if key == "hashfield":
self.has_hashfield = True
self.hashfield = PeerHashfield()
return self.hashfield
else:
return object.__getattribute__(self, key) # Raise AttributeError without re-entering __getattr__
def log(self, text):
if not config.verbose:
return # Only log in verbose mode
if self.site:
self.site.log.debug("%s:%s %s" % (self.ip, self.port, text))
else:
logging.debug("%s:%s %s" % (self.ip, self.port, text))
# Connect to host
def connect(self, connection=None):
if self.reputation < -10:
self.reputation = -10
if self.reputation > 10:
self.reputation = 10
if self.connection:
self.log("Getting connection (Closing %s)..." % self.connection)
self.connection.close("Connection change")
else:
self.log("Getting connection (reputation: %s)..." % self.reputation)
if connection: # Connection specified
self.log("Assigning connection %s" % connection)
self.connection = connection
self.connection.sites += 1
else: # Try to find from connection pool or create new connection
self.connection = None
try:
if self.connection_server:
connection_server = self.connection_server
elif self.site:
connection_server = self.site.connection_server
else:
import main
connection_server = main.file_server
self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection)
self.reputation += 1
self.connection.sites += 1
except Exception as err:
self.onConnectionError("Getting connection error")
self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
(Debug.formatException(err), self.connection_error, self.hash_failed))
self.connection = None
return self.connection
# Check if we have connection to peer
def findConnection(self):
if self.connection and self.connection.connected: # We have connection to peer
return self.connection
else: # Try to find from other sites connections
self.connection = self.site.connection_server.getConnection(self.ip, self.port, create=False, site=self.site)
if self.connection:
self.connection.sites += 1
return self.connection
def __str__(self):
if self.site:
return "Peer:%-12s of %s" % (self.ip, self.site.address_short)
else:
return "Peer:%-12s" % self.ip
def __repr__(self):
return "<%s>" % self.__str__()
def packMyAddress(self):
if self.ip.endswith(".onion"):
return helper.packOnionAddress(self.ip, self.port)
else:
return helper.packAddress(self.ip, self.port)
# Found a peer from a source
def found(self, source="other"):
if self.reputation < 5:
if source == "tracker":
if self.ip.endswith(".onion"):
self.reputation += 1
else:
self.reputation += 2
elif source == "local":
self.reputation += 20
if source in ("tracker", "local"):
self.site.peers_recent.appendleft(self)
self.time_found = time.time()
# Send a command to peer and return response value
def request(self, cmd, params={}, stream_to=None):
if not self.connection or self.connection.closed:
self.connect()
if not self.connection:
self.onConnectionError("Reconnect error")
return None # Connection failed
self.log("Send request: %s %s %s %s" % (params.get("site", ""), cmd, params.get("inner_path", ""), params.get("location", "")))
for retry in range(1, 4): # Retry 3 times
try:
if not self.connection:
raise Exception("No connection found")
res = self.connection.request(cmd, params, stream_to)
if not res:
raise Exception("Send error")
if "error" in res:
self.log("%s error: %s" % (cmd, res["error"]))
self.onConnectionError("Response error")
break
else: # Successful request, reset connection error num
self.connection_error = 0
self.time_response = time.time()
if res:
return res
else:
raise Exception("Invalid response: %s" % res)
except Exception as err:
if type(err).__name__ == "Notify": # Greenlet killed by worker
self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
break
else:
self.onConnectionError("Request error")
self.log(
"%s (connection_error: %s, hash_failed: %s, retry: %s)" %
(Debug.formatException(err), self.connection_error, self.hash_failed, retry)
)
time.sleep(1 * retry)
self.connect()
return None # Failed after 3 retries
# Get a file content from peer
def getFile(self, site, inner_path, file_size=None, pos_from=0, pos_to=None, streaming=False):
if file_size and file_size > 5 * 1024 * 1024:
max_read_size = 1024 * 1024
else:
max_read_size = 512 * 1024
if pos_to:
read_bytes = min(max_read_size, pos_to - pos_from)
else:
read_bytes = max_read_size
location = pos_from
if config.use_tempfiles:
buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
else:
buff = io.BytesIO()
s = time.time()
while True: # Read in smaller parts
if config.stream_downloads or read_bytes > 256 * 1024 or streaming:
res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}, stream_to=buff)
if not res or "location" not in res: # Error
return False
else:
self.log("Send: %s" % inner_path)
res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size})
if not res or "location" not in res: # Error
return False
self.log("Recv: %s" % inner_path)
buff.write(res["body"])
res["body"] = None # Save memory
if res["location"] == res["size"] or res["location"] == pos_to: # End of file
break
else:
location = res["location"]
if pos_to:
read_bytes = min(max_read_size, pos_to - location)
if pos_to:
recv = pos_to - pos_from
else:
recv = res["location"]
self.download_bytes += recv
self.download_time += (time.time() - s)
if self.site:
self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + recv
self.log("Downloaded: %s, pos: %s, read_bytes: %s" % (inner_path, buff.tell(), read_bytes))
buff.seek(0)
return buff
# Send a ping request
def ping(self):
response_time = None
for retry in range(1, 3): # Retry 2 times
s = time.time()
with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception
res = self.request("ping")
if res and "body" in res and res["body"] == b"Pong!":
response_time = time.time() - s
break # All fine, exit from for loop
# Timeout reached or bad response
self.onConnectionError("Ping timeout")
self.connect()
time.sleep(1)
if response_time:
self.log("Ping: %.3f" % response_time)
else:
self.log("Ping failed")
self.last_ping = response_time
return response_time
# Request peer exchange from peer
def pex(self, site=None, need_num=5):
if not site:
site = self.site # If no site defined request peers for this site
# Give back 5 connectable peers
packed_peers = helper.packPeers(self.site.getConnectablePeers(5, allow_private=False))
request = {"site": site.address, "peers": packed_peers["ipv4"], "need": need_num}
if packed_peers["onion"]:
request["peers_onion"] = packed_peers["onion"]
if packed_peers["ipv6"]:
request["peers_ipv6"] = packed_peers["ipv6"]
res = self.request("pex", request)
if not res or "error" in res:
return False
added = 0
# Remove unsupported peer types
if "peers_ipv6" in res and self.connection and "ipv6" not in self.connection.server.supported_ip_types:
del res["peers_ipv6"]
if "peers_onion" in res and self.connection and "onion" not in self.connection.server.supported_ip_types:
del res["peers_onion"]
# Add IPv4 + IPv6
for peer in itertools.chain(res.get("peers", []), res.get("peers_ipv6", [])):
address = helper.unpackAddress(peer)
if site.addPeer(*address, source="pex"):
added += 1
# Add Onion
for peer in res.get("peers_onion", []):
address = helper.unpackOnionAddress(peer)
if site.addPeer(*address, source="pex"):
added += 1
if added:
self.log("Added peers using pex: %s" % added)
return added
# List modified files since the date
# Return: {inner_path: modification date,...}
def listModified(self, since):
return self.request("listModified", {"since": since, "site": self.site.address})
def updateHashfield(self, force=False):
# Don't update hashfield again in 5 min
if self.time_hashfield and time.time() - self.time_hashfield < 5 * 60 and not force:
return False
self.time_hashfield = time.time()
res = self.request("getHashfield", {"site": self.site.address})
if not res or "error" in res or "hashfield_raw" not in res:
return False
self.hashfield.replaceFromBytes(res["hashfield_raw"])
return self.hashfield
# Find peers for hashids
# Return: {hash1: ["ip:port", "ip:port",...],...}
def findHashIds(self, hash_ids):
res = self.request("findHashIds", {"site": self.site.address, "hash_ids": hash_ids})
if not res or "error" in res or type(res) is not dict:
return False
back = collections.defaultdict(list)
for ip_type in ["ipv4", "ipv6", "onion"]:
if ip_type == "ipv4":
key = "peers"
else:
key = "peers_%s" % ip_type
for hash, peers in list(res.get(key, {}).items())[0:30]:
if ip_type == "onion":
unpacker_func = helper.unpackOnionAddress
else:
unpacker_func = helper.unpackAddress
back[hash] += list(map(unpacker_func, peers))
for hash in res.get("my", []):
if self.connection:
back[hash].append((self.connection.ip, self.connection.port))
else:
back[hash].append((self.ip, self.port))
return back
# Send my hashfield to peer
# Return: True if sent
def sendMyHashfield(self):
if self.connection and self.connection.handshake.get("rev", 0) < 510:
return False # Not supported
if self.time_my_hashfield_sent and self.site.content_manager.hashfield.time_changed <= self.time_my_hashfield_sent:
return False # Peer already has the latest hashfield
res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tobytes()})
if not res or "error" in res:
return False
else:
self.time_my_hashfield_sent = time.time()
return True
def publish(self, address, inner_path, body, modified, diffs=[]):
if len(body) > 10 * 1024 and self.connection and self.connection.handshake.get("rev", 0) >= 4095:
# To save bandwidth we don't push big content.json bodies to peers
body = b""
return self.request("update", {
"site": address,
"inner_path": inner_path,
"body": body,
"modified": modified,
"diffs": diffs
})
# Stop and remove from site
def remove(self, reason="Removing"):
self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
if self.site and self.key in self.site.peers:
del(self.site.peers[self.key])
if self.site and self in self.site.peers_recent:
self.site.peers_recent.remove(self)
if self.connection:
self.connection.close(reason)
# - EVENTS -
# On connection error
def onConnectionError(self, reason="Unknown"):
self.connection_error += 1
if self.site and len(self.site.peers) > 200:
limit = 3
else:
limit = 6
self.reputation -= 1
if self.connection_error >= limit: # Dead peer
self.remove("Peer connection: %s" % reason)
# Done working with peer
def onWorkerDone(self):
pass
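Usage sketch (IP, port and the existing site object are hypothetical): download one file from a known peer of a site:

peer = Peer("203.0.113.5", 15441, site=site)
buff = peer.getFile(site.address, "content.json")  # Buffer object, or False on error
if buff:
    print(buff.read()[:80])
peer.remove("Done")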

View File

@ -1,75 +0,0 @@
import array
import time
class PeerHashfield(object):
__slots__ = ("storage", "time_changed", "append", "remove", "tobytes", "frombytes", "__len__", "__iter__")
def __init__(self):
self.storage = self.createStorage()
self.time_changed = time.time()
def createStorage(self):
storage = array.array("H")
self.append = storage.append
self.remove = storage.remove
self.tobytes = storage.tobytes
self.frombytes = storage.frombytes
self.__len__ = storage.__len__
self.__iter__ = storage.__iter__
return storage
def appendHash(self, hash):
hash_id = int(hash[0:4], 16)
if hash_id not in self.storage:
self.storage.append(hash_id)
self.time_changed = time.time()
return True
else:
return False
def appendHashId(self, hash_id):
if hash_id not in self.storage:
self.storage.append(hash_id)
self.time_changed = time.time()
return True
else:
return False
def removeHash(self, hash):
hash_id = int(hash[0:4], 16)
if hash_id in self.storage:
self.storage.remove(hash_id)
self.time_changed = time.time()
return True
else:
return False
def removeHashId(self, hash_id):
if hash_id in self.storage:
self.storage.remove(hash_id)
self.time_changed = time.time()
return True
else:
return False
def getHashId(self, hash):
return int(hash[0:4], 16)
def hasHash(self, hash):
return int(hash[0:4], 16) in self.storage
def replaceFromBytes(self, hashfield_raw):
self.storage = self.createStorage()
self.storage.frombytes(hashfield_raw)
self.time_changed = time.time()
if __name__ == "__main__":
field = PeerHashfield()
s = time.time()
for i in range(10000):
field.appendHashId(i)
print(time.time()-s)
s = time.time()
for i in range(10000):
field.hasHash("AABB")
print(time.time()-s)
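Note that only the first two bytes of each hash are stored (int(hash[0:4], 16)), so membership is probabilistic: distinct hashes sharing a 4-hex-char prefix collide in the 0-65535 id space. A worked example (hash values illustrative):

field = PeerHashfield()
field.appendHash("aabb112233")      # stored as hash_id 0xaabb == 43707
print(field.hasHash("aabbffff"))    # True: same prefix, possible collision
print(field.getHashId("aabb0000"))  # 43707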

View File

@ -1,189 +0,0 @@
import logging
import urllib.request
import urllib.parse
import re
import time
from Debug import Debug
from util import UpnpPunch
class PeerPortchecker(object):
checker_functions = {
"ipv4": ["checkIpfingerprints", "checkCanyouseeme"],
"ipv6": ["checkMyaddr", "checkIpv6scanner"]
}
def __init__(self, file_server):
self.log = logging.getLogger("PeerPortchecker")
self.upnp_port_opened = False
self.file_server = file_server
def requestUrl(self, url, post_data=None):
if type(post_data) is dict:
post_data = urllib.parse.urlencode(post_data).encode("utf8")
req = urllib.request.Request(url, post_data)
req.add_header("Referer", url)
req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11")
req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
return urllib.request.urlopen(req, timeout=20.0)
def portOpen(self, port):
self.log.info("Trying to open port using UpnpPunch...")
try:
UpnpPunch.ask_to_open_port(port, 'ZeroNet', retries=3, protos=["TCP"])
self.upnp_port_opened = True
except Exception as err:
self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err))
return False
return True
def portClose(self, port):
return UpnpPunch.ask_to_close_port(port, protos=["TCP"])
def portCheck(self, port, ip_type="ipv4"):
checker_functions = self.checker_functions[ip_type]
for func_name in checker_functions:
func = getattr(self, func_name)
s = time.time()
try:
res = func(port)
if res:
self.log.info(
"Checked port %s (%s) using %s result: %s in %.3fs" %
(port, ip_type, func_name, res, time.time() - s)
)
time.sleep(0.1)
if res["opened"] and not self.file_server.had_external_incoming:
res["opened"] = False
self.log.warning("Port %s:%s looks opened, but no incoming connection" % (res["ip"], port))
break
except Exception as err:
self.log.warning(
"%s check error: %s in %.3fs" %
(func_name, Debug.formatException(err), time.time() - s)
)
res = {"ip": None, "opened": False}
return res
def checkCanyouseeme(self, port):
data = urllib.request.urlopen("https://www.canyouseeme.org/", b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8")
message = re.match(r'.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1)
message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags
match = re.match(r".*service on (.*?) on", message)
if match:
ip = match.group(1)
else:
raise Exception("Invalid response: %s" % message)
if "Success" in message:
return {"ip": ip, "opened": True}
elif "Error" in message:
return {"ip": ip, "opened": False}
else:
raise Exception("Invalid response: %s" % message)
def checkIpfingerprints(self, port):
data = self.requestUrl("https://www.ipfingerprints.com/portscan.php").read().decode("utf8")
ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1)
post_data = {
"remoteHost": ip, "start_port": port, "end_port": port,
"normalScan": "Yes", "scan_type": "connect2", "ping_type": "none"
}
message = self.requestUrl("https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data).read().decode("utf8")
if "open" in message:
return {"ip": ip, "opened": True}
elif "filtered" in message or "closed" in message:
return {"ip": ip, "opened": False}
else:
raise Exception("Invalid response: %s" % message)
def checkMyaddr(self, port):
url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php"
data = self.requestUrl(url).read().decode("utf8")
ip = re.match(r'.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1)
post_data = {"addr": ip, "ports_selected": "", "ports_list": port}
data = self.requestUrl(url, post_data).read().decode("utf8")
message = re.match(r".*<table class='table_font_16'>(.*?)</table>", data, re.DOTALL).group(1)
if "ok.png" in message:
return {"ip": ip, "opened": True}
elif "fail.png" in message:
return {"ip": ip, "opened": False}
else:
raise Exception("Invalid response: %s" % message)
def checkIpv6scanner(self, port):
url = "http://www.ipv6scanner.com/cgi-bin/main.py"
data = self.requestUrl(url).read().decode("utf8")
ip = re.match(r'.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1)
post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"}
data = self.requestUrl(url, post_data).read().decode("utf8")
message = re.match(r".*<table id='scantable'>(.*?)</table>", data, re.DOTALL).group(1)
message_text = re.sub("<.*?>", " ", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
if "OPEN" in message_text:
return {"ip": ip, "opened": True}
elif "CLOSED" in message_text or "FILTERED" in message_text:
return {"ip": ip, "opened": False}
else:
raise Exception("Invalid response: %s" % message_text)
def checkPortchecker(self, port): # Not working: Forbidden
data = self.requestUrl("https://portchecker.co").read().decode("utf8")
csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1)
data = self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf}).read().decode("utf8")
message = re.match(r'.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1)
message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL)
if match:
ip = match.group(1)
else:
raise Exception("Invalid response: %s" % message)
if "open" in message:
return {"ip": ip, "opened": True}
elif "closed" in message:
return {"ip": ip, "opened": False}
else:
raise Exception("Invalid response: %s" % message)
def checkSubnetonline(self, port): # Not working: Invalid response
url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php"
data = self.requestUrl(url).read().decode("utf8")
ip = re.match(r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1)
token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1)
post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."}
data = self.requestUrl(url, post_data).read().decode("utf8")
print(post_data, data)
message = re.match(r".*<div class='formfield'>(.*?)</div>", data, re.DOTALL).group(1)
message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
if "online" in message:
return {"ip": ip, "opened": True}
elif "closed" in message:
return {"ip": ip, "opened": False}
else:
raise Exception("Invalid response: %s" % message)

View File

@ -1,2 +0,0 @@
from .Peer import Peer
from .PeerHashfield import PeerHashfield

View File

@ -1,287 +0,0 @@
import logging
import os
import sys
import shutil
import time
from collections import defaultdict
import importlib
import json
from Debug import Debug
from Config import config
import plugins
class PluginManager:
def __init__(self):
self.log = logging.getLogger("PluginManager")
self.path_plugins = os.path.abspath(os.path.dirname(plugins.__file__))
self.path_installed_plugins = config.data_dir + "/__plugins__"
self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class)
self.subclass_order = {} # Record the load order of the plugins, to keep it after reload
self.pluggable = {}
self.plugin_names = [] # Loaded plugin names
self.plugins_updated = {} # List of updated plugins since restart
self.plugins_rev = {} # Installed plugins revision numbers
self.after_load = [] # Execute functions after loaded plugins
self.function_flags = {} # Flag function for permissions
self.reloading = False
self.config_path = config.data_dir + "/plugins.json"
self.loadConfig()
self.config.setdefault("builtin", {})
sys.path.append(os.path.join(os.getcwd(), self.path_plugins))
self.migratePlugins()
if config.debug: # Auto reload Plugins on file change
from Debug import DebugReloader
DebugReloader.watcher.addCallback(self.reloadPlugins)
def loadConfig(self):
if os.path.isfile(self.config_path):
try:
self.config = json.load(open(self.config_path, encoding="utf8"))
except Exception as err:
self.log.error("Error loading %s: %s" % (self.config_path, err))
self.config = {}
else:
self.config = {}
def saveConfig(self):
f = open(self.config_path, "w", encoding="utf8")
json.dump(self.config, f, ensure_ascii=False, sort_keys=True, indent=2)
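# Shape of data/plugins.json as read/written above (an illustrative example;
# the keys under a site address follow listInstalledPlugins below):
# {
#   "builtin": {
#     "AnnounceLocal": {"enabled": false}
#   },
#   "1SiteAddr...": {
#     "MyPlugin": {"enabled": true, "rev": 4555}
#   }
# }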
def migratePlugins(self):
for dir_name in os.listdir(self.path_plugins):
if dir_name == "Mute":
self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name)
shutil.rmtree("%s/%s" % (self.path_plugins, dir_name))
# -- Load / Unload --
def listPlugins(self, list_disabled=False):
plugins = []
for dir_name in sorted(os.listdir(self.path_plugins)):
dir_path = os.path.join(self.path_plugins, dir_name)
plugin_name = dir_name.replace("disabled-", "")
if dir_name.startswith("disabled"):
is_enabled = False
else:
is_enabled = True
plugin_config = self.config["builtin"].get(plugin_name, {})
if "enabled" in plugin_config:
is_enabled = plugin_config["enabled"]
if dir_name == "__pycache__" or not os.path.isdir(dir_path):
continue # skip
if dir_name.startswith("Debug") and not config.debug:
continue # Only load in debug mode if module name starts with Debug
if not is_enabled and not list_disabled:
continue # Don't load if disabled
plugin = {}
plugin["source"] = "builtin"
plugin["name"] = plugin_name
plugin["dir_name"] = dir_name
plugin["dir_path"] = dir_path
plugin["inner_path"] = plugin_name
plugin["enabled"] = is_enabled
plugin["rev"] = config.rev
plugin["loaded"] = plugin_name in self.plugin_names
plugins.append(plugin)
plugins += self.listInstalledPlugins(list_disabled)
return plugins
def listInstalledPlugins(self, list_disabled=False):
plugins = []
for address, site_plugins in sorted(self.config.items()):
if address == "builtin":
continue
for plugin_inner_path, plugin_config in sorted(site_plugins.items()):
is_enabled = plugin_config.get("enabled", False)
if not is_enabled and not list_disabled:
continue
plugin_name = os.path.basename(plugin_inner_path)
dir_path = "%s/%s/%s" % (self.path_installed_plugins, address, plugin_inner_path)
plugin = {}
plugin["source"] = address
plugin["name"] = plugin_name
plugin["dir_name"] = plugin_name
plugin["dir_path"] = dir_path
plugin["inner_path"] = plugin_inner_path
plugin["enabled"] = is_enabled
plugin["rev"] = plugin_config.get("rev", 0)
plugin["loaded"] = plugin_name in self.plugin_names
plugins.append(plugin)
return plugins
# Load all plugins
def loadPlugins(self):
all_loaded = True
s = time.time()
for plugin in self.listPlugins():
self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"]))
if plugin["source"] != "builtin":
self.plugins_rev[plugin["name"]] = plugin["rev"]
site_plugin_dir = os.path.dirname(plugin["dir_path"])
if site_plugin_dir not in sys.path:
sys.path.append(site_plugin_dir)
try:
sys.modules[plugin["name"]] = __import__(plugin["dir_name"])
except Exception as err:
self.log.error("Plugin %s load error: %s" % (plugin["name"], Debug.formatException(err)))
all_loaded = False
if plugin["name"] not in self.plugin_names:
self.plugin_names.append(plugin["name"])
self.log.debug("Plugins loaded in %.3fs" % (time.time() - s))
for func in self.after_load:
func()
return all_loaded
# Reload all plugins
def reloadPlugins(self):
self.reloading = True
self.after_load = []
self.plugins_before = self.plugins
self.plugins = defaultdict(list) # Reset registered plugins
for module_name, module in list(sys.modules.items()):
if not module or not getattr(module, "__file__", None):
continue
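# Only touch modules that were loaded from one of the plugin directories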
if self.path_plugins not in module.__file__ and self.path_installed_plugins not in module.__file__:
continue
if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled
# Re-add non-reloadable plugins
for class_name, classes in self.plugins_before.items():
for c in classes:
if c.__module__ != module.__name__:
continue
self.plugins[class_name].append(c)
else:
try:
importlib.reload(module)
except Exception as err:
self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err)))
self.loadPlugins() # Load new plugins
# Change current classes in memory
import gc
patched = {}
for class_name, classes in self.plugins.items():
classes = classes[:] # Copy the current plugins
classes.reverse()
base_class = self.pluggable[class_name] # Original class
classes.append(base_class) # Add the base class itself to the end of the inheritance line
plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
for obj in gc.get_objects():
if type(obj).__name__ == class_name:
obj.__class__ = plugined_class
patched[class_name] = patched.get(class_name, 0) + 1
self.log.debug("Patched objects: %s" % patched)
# Change classes in modules
patched = {}
for class_name, classes in self.plugins.items():
for module_name, module in list(sys.modules.items()):
if class_name in dir(module):
if "__class__" not in dir(getattr(module, class_name)): # Not a class
continue
base_class = self.pluggable[class_name]
classes = self.plugins[class_name][:]
classes.reverse()
classes.append(base_class)
plugined_class = type(class_name, tuple(classes), dict())
setattr(module, class_name, plugined_class)
patched[class_name] = patched.get(class_name, 0) + 1
self.log.debug("Patched modules: %s" % patched)
self.reloading = False
plugin_manager = PluginManager() # Singleton
# -- Decorators --
# Accept plugin to class decorator
def acceptPlugins(base_class):
class_name = base_class.__name__
plugin_manager.pluggable[class_name] = base_class
if class_name in plugin_manager.plugins: # Has plugins
classes = plugin_manager.plugins[class_name][:] # Copy the current plugins
# Restore the subclass order after reload
if class_name in plugin_manager.subclass_order:
classes = sorted(
classes,
key=lambda key:
plugin_manager.subclass_order[class_name].index(str(key))
if str(key) in plugin_manager.subclass_order[class_name]
else 9999
)
plugin_manager.subclass_order[class_name] = list(map(str, classes))
classes.reverse()
classes.append(base_class) # Add the base class itself to the end of the inheritance line
plugined_class = type(class_name, tuple(classes), dict()) # Create the plugined class
plugin_manager.log.debug("New class accepts plugins: %s (Loaded plugins: %s)" % (class_name, classes))
else: # No plugins just use the original
plugined_class = base_class
return plugined_class
# Register plugin to class name decorator
def registerTo(class_name):
if config.debug and not plugin_manager.reloading:
import gc
for obj in gc.get_objects():
if type(obj).__name__ == class_name:
raise Exception("Class %s instances already present in memory" % class_name)
plugin_manager.log.debug("New plugin registered to: %s" % class_name)
if class_name not in plugin_manager.plugins:
plugin_manager.plugins[class_name] = []
def classDecorator(self):
plugin_manager.plugins[class_name].append(self)
return self
return classDecorator
def afterLoad(func):
plugin_manager.after_load.append(func)
return func
# - Example usage -
if __name__ == "__main__":
@registerTo("Request")
class RequestPlugin(object):
def actionMainPage(self, path):
return "Hello MainPage!"
@acceptPlugins
class Request(object):
def route(self, path):
func = getattr(self, "action" + path, None)
if func:
return func(path)
else:
return "Can't route to", path
print(Request().route("MainPage"))
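# Prints "Hello MainPage!": RequestPlugin precedes Request in the patched class's MRO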

View File

File diff suppressed because it is too large

View File

@ -1,293 +0,0 @@
import random
import time
import hashlib
import re
import collections
import gevent
from Plugin import PluginManager
from Config import config
from Debug import Debug
from util import helper
from greenlet import GreenletExit
import util
class AnnounceError(Exception):
pass
global_stats = collections.defaultdict(lambda: collections.defaultdict(int))
@PluginManager.acceptPlugins
class SiteAnnouncer(object):
def __init__(self, site):
self.site = site
self.stats = {}
self.fileserver_port = config.fileserver_port
self.peer_id = self.site.connection_server.peer_id
self.last_tracker_id = random.randint(0, 10)
self.time_last_announce = 0
def getTrackers(self):
return config.trackers
def getSupportedTrackers(self):
trackers = self.getTrackers()
if not self.site.connection_server.tor_manager.enabled:
trackers = [tracker for tracker in trackers if ".onion" not in tracker]
trackers = [tracker for tracker in trackers if self.getAddressParts(tracker)] # Remove trackers with unknown address
if "ipv6" not in self.site.connection_server.supported_ip_types:
trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"]
return trackers
def getAnnouncingTrackers(self, mode):
trackers = self.getSupportedTrackers()
if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker id
self.last_tracker_id += 1
self.last_tracker_id = self.last_tracker_id % len(trackers)
trackers_announcing = [trackers[self.last_tracker_id]] # We are only going to use this one
else:
trackers_announcing = trackers
return trackers_announcing
def getOpenedServiceTypes(self):
back = []
# Address types on which peers can reach me
if config.trackers_proxy == "disable" and config.tor != "always":
for ip_type, opened in list(self.site.connection_server.port_opened.items()):
if opened:
back.append(ip_type)
if self.site.connection_server.tor_manager.start_onions:
back.append("onion")
return back
@util.Noparallel(blocking=False)
def announce(self, force=False, mode="start", pex=True):
if time.time() - self.time_last_announce < 30 and not force:
return # No reannouncing within 30 secs
if force:
self.site.log.debug("Force reannounce in mode %s" % mode)
self.fileserver_port = config.fileserver_port
self.time_last_announce = time.time()
trackers = self.getAnnouncingTrackers(mode)
if config.verbose:
self.site.log.debug("Tracker announcing, trackers: %s" % trackers)
errors = []
slow = []
s = time.time()
threads = []
num_announced = 0
for tracker in trackers: # Start announce threads
tracker_stats = global_stats[tracker]
# Reduce the announce time for trackers that look unreliable
time_announce_allowed = time.time() - 60 * min(30, tracker_stats["num_error"])
if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time_announce_allowed and not force:
if config.verbose:
self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"]))
continue
thread = self.site.greenlet_manager.spawn(self.announceTracker, tracker, mode=mode)
threads.append(thread)
thread.tracker = tracker
time.sleep(0.01)
self.updateWebsocket(trackers="announcing")
gevent.joinall(threads, timeout=20) # Wait for announce finish
for thread in threads:
if thread.value is None:
continue
if thread.value is not False:
if thread.value > 1.0: # Takes more than 1 second to announce
slow.append("%.2fs %s" % (thread.value, thread.tracker))
num_announced += 1
else:
if thread.ready():
errors.append(thread.tracker)
else: # Still running
slow.append("30s+ %s" % thread.tracker)
# Save peers num
self.site.settings["peers"] = len(self.site.peers)
if len(errors) < len(threads): # At least one tracker finished
if len(trackers) == 1:
announced_to = trackers[0]
else:
announced_to = "%s/%s trackers" % (num_announced, len(threads))
if mode != "update" or config.verbose:
self.site.log.debug(
"Announced in mode %s to %s in %.3fs, errors: %s, slow: %s" %
(mode, announced_to, time.time() - s, errors, slow)
)
else:
if len(threads) > 1:
self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s))
if len(threads) == 1 and mode != "start": # Move to next tracker
self.site.log.debug("Tracker failed, skipping to next one...")
self.site.greenlet_manager.spawnLater(1.0, self.announce, force=force, mode=mode, pex=pex)
self.updateWebsocket(trackers="announced")
if pex:
self.updateWebsocket(pex="announcing")
if mode == "more": # Need more peers
self.announcePex(need_num=10)
else:
self.announcePex()
self.updateWebsocket(pex="announced")
def getTrackerHandler(self, protocol):
return None
def getAddressParts(self, tracker):
if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker):
return None
protocol, address = tracker.split("://", 1)
if ":" in address:
ip, port = address.rsplit(":", 1)
else:
ip = address
if protocol.startswith("https"):
port = 443
else:
port = 80
back = {}
back["protocol"] = protocol
back["address"] = address
back["ip"] = ip
back["port"] = port
return back
def announceTracker(self, tracker, mode="start", num_want=10):
s = time.time()
address_parts = self.getAddressParts(tracker)
if not address_parts:
self.site.log.warning("Tracker %s error: Invalid address" % tracker)
return False
if tracker not in self.stats:
self.stats[tracker] = {"status": "", "num_request": 0, "num_success": 0, "num_error": 0, "time_request": 0, "time_last_error": 0}
last_status = self.stats[tracker]["status"]
self.stats[tracker]["status"] = "announcing"
self.stats[tracker]["time_request"] = time.time()
global_stats[tracker]["time_request"] = time.time()
if config.verbose:
self.site.log.debug("Tracker announcing to %s (mode: %s)" % (tracker, mode))
if mode == "update":
num_want = 10
else:
num_want = 30
handler = self.getTrackerHandler(address_parts["protocol"])
error = None
try:
if handler:
peers = handler(address_parts["address"], mode=mode, num_want=num_want)
else:
raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"])
except Exception as err:
self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode))
error = err
if error:
self.stats[tracker]["status"] = "error"
self.stats[tracker]["time_status"] = time.time()
self.stats[tracker]["last_error"] = str(error)
self.stats[tracker]["time_last_error"] = time.time()
if self.site.connection_server.has_internet:
self.stats[tracker]["num_error"] += 1
self.stats[tracker]["num_request"] += 1
global_stats[tracker]["num_request"] += 1
if self.site.connection_server.has_internet:
global_stats[tracker]["num_error"] += 1
self.updateWebsocket(tracker="error")
return False
if peers is None: # Announce skipped
self.stats[tracker]["time_status"] = time.time()
self.stats[tracker]["status"] = last_status
return None
self.stats[tracker]["status"] = "announced"
self.stats[tracker]["time_status"] = time.time()
self.stats[tracker]["num_success"] += 1
self.stats[tracker]["num_request"] += 1
global_stats[tracker]["num_request"] += 1
global_stats[tracker]["num_error"] = 0
if peers is True: # Announce success, but no peers returned
return time.time() - s
# Adding peers
added = 0
for peer in peers:
if peer["port"] == 1: # Some trackers does not accept port 0, so we send port 1 as not-connectable
peer["port"] = 0
if not peer["port"]:
continue # Don't add peers with port 0
if self.site.addPeer(peer["addr"], peer["port"], source="tracker"):
added += 1
if added:
self.site.worker_manager.onPeers()
self.site.updateWebsocket(peers_added=added)
if config.verbose:
self.site.log.debug(
"Tracker result: %s://%s (found %s peers, new: %s, total: %s)" %
(address_parts["protocol"], address_parts["address"], len(peers), added, len(self.site.peers))
)
return time.time() - s
@util.Noparallel(blocking=False)
def announcePex(self, query_num=2, need_num=5):
peers = self.site.getConnectedPeers()
if len(peers) == 0: # Wait 3s for connections
time.sleep(3)
peers = self.site.getConnectedPeers()
if len(peers) == 0: # Small number of connected peers for this site, connect to any
peers = list(self.site.getRecentPeers(20))
need_num = 10
random.shuffle(peers)
done = 0
total_added = 0
for peer in peers:
num_added = peer.pex(need_num=need_num)
if num_added is not False:
done += 1
total_added += num_added
if num_added:
self.site.worker_manager.onPeers()
self.site.updateWebsocket(peers_added=num_added)
else:
time.sleep(0.1)
if done == query_num:
break
self.site.log.debug("Pex result: from %s peers got %s new peers." % (done, total_added))
def updateWebsocket(self, **kwargs):
if kwargs:
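# Forward only the first keyword argument as the (key, value) event tuple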
param = {"event": list(kwargs.items())[0]}
else:
param = None
for ws in self.site.websockets:
ws.event("announcerChanged", self.site, param)
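# The base getTrackerHandler() above always returns None, so announceTracker()
# raises AnnounceError("Unknown protocol: ...") unless a plugin supplies a handler.
# A minimal sketch of such a plugin using the registerTo decorator from the
# PluginManager module; announceHttp is hypothetical, only its
# (address, mode, num_want) call signature is taken from announceTracker() above.
@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
    def getTrackerHandler(self, protocol):
        if protocol in ("http", "https"):
            return self.announceHttp  # hypothetical handler, assumed defined in the plugin
        return super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol)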

View File

@ -1,226 +0,0 @@
import json
import logging
import re
import os
import time
import atexit
import gevent
import util
from Plugin import PluginManager
from Content import ContentDb
from Config import config
from util import helper
from util import RateLimit
from util import Cached
@PluginManager.acceptPlugins
class SiteManager(object):
def __init__(self):
self.log = logging.getLogger("SiteManager")
self.log.debug("SiteManager created.")
self.sites = {}
self.sites_changed = int(time.time())
self.loaded = False
gevent.spawn(self.saveTimer)
atexit.register(lambda: self.save(recalculate_size=True))
# Load all sites from data/sites.json
@util.Noparallel()
def load(self, cleanup=True, startup=False):
from Debug import Debug
self.log.info("Loading sites... (cleanup: %s, startup: %s)" % (cleanup, startup))
self.loaded = False
from .Site import Site
address_found = []
added = 0
load_s = time.time()
# Load new addresses
try:
json_path = "%s/sites.json" % config.data_dir
data = json.load(open(json_path))
except Exception as err:
raise Exception("Unable to load %s: %s" % (json_path, err))
sites_need = []
for address, settings in data.items():
if address not in self.sites:
if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
# Root content.json exists, try load site
s = time.time()
try:
site = Site(address, settings=settings)
site.content_manager.contents.get("content.json")
except Exception as err:
self.log.debug("Error loading site %s: %s" % (address, err))
continue
self.sites[address] = site
self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s))
added += 1
elif startup:
# No site directory, start download
self.log.debug("Found new site in sites.json: %s" % address)
sites_need.append([address, settings])
added += 1
address_found.append(address)
# Remove deleted addresses
if cleanup:
for address in list(self.sites.keys()):
if address not in address_found:
del(self.sites[address])
self.log.debug("Removed site: %s" % address)
# Remove orphan sites from content.db
content_db = ContentDb.getContentDb()
for row in content_db.execute("SELECT * FROM site").fetchall():
address = row["address"]
if address not in self.sites and address not in address_found:
self.log.info("Deleting orphan site from content.db: %s" % address)
try:
content_db.execute("DELETE FROM site WHERE ?", {"address": address})
except Exception as err:
self.log.error("Can't delete site %s from content_db: %s" % (address, err))
if address in content_db.site_ids:
del content_db.site_ids[address]
if address in content_db.sites:
del content_db.sites[address]
self.loaded = True
for address, settings in sites_need:
gevent.spawn(self.need, address, settings=settings)
if added:
self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s))
def saveDelayed(self):
RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save)
def save(self, recalculate_size=False):
if not self.sites:
self.log.debug("Save skipped: No sites found")
return
if not self.loaded:
self.log.debug("Save skipped: Not loaded")
return
time_start = time.time()
data = {}
# Generate data file
s = time.time()
for address, site in list(self.list().items()):
if recalculate_size:
site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size
data[address] = site.settings
data[address]["cache"] = site.getSettingsCache()
time_generate = time.time() - s
s = time.time()
if data:
helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8"))
else:
self.log.debug("Save error: No data")
time_write = time.time() - s
# Remove cache from site settings
for address, site in self.list().items():
site.settings["cache"] = {}
self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write))
def saveTimer(self):
while 1:
time.sleep(60 * 10)
self.save(recalculate_size=True)
# Checks if it's a valid address
def isAddress(self, address):
return re.match("^[A-Za-z0-9]{26,35}$", address)
def isDomain(self, address):
return False
@Cached(timeout=10)
def isDomainCached(self, address):
return self.isDomain(address)
def resolveDomain(self, domain):
return False
@Cached(timeout=10)
def resolveDomainCached(self, domain):
return self.resolveDomain(domain)
# Return: Site object or None if not found
def get(self, address):
if self.isDomainCached(address):
address_resolved = self.resolveDomainCached(address)
if address_resolved:
address = address_resolved
if not self.loaded: # Not loaded yet
self.log.debug("Loading site: %s)..." % address)
self.load()
site = self.sites.get(address)
return site
def add(self, address, all_file=True, settings=None, **kwargs):
from .Site import Site
self.sites_changed = int(time.time())
# Try to find the site with a different case
for recover_address, recover_site in list(self.sites.items()):
if recover_address.lower() == address.lower():
return recover_site
if not self.isAddress(address):
return False # Not address: %s % address
self.log.debug("Added new site: %s" % address)
config.loadTrackersFile()
site = Site(address, settings=settings)
self.sites[address] = site
if not site.settings["serving"]: # Maybe it was deleted before
site.settings["serving"] = True
site.saveSettings()
if all_file: # Also download user files on first sync
site.download(check_size=True, blind_includes=True)
return site
# Return or create site and start download site files
def need(self, address, *args, **kwargs):
if self.isDomainCached(address):
address_resolved = self.resolveDomainCached(address)
if address_resolved:
address = address_resolved
site = self.get(address)
if not site: # Site doesn't exist yet
site = self.add(address, *args, **kwargs)
return site
def delete(self, address):
self.sites_changed = int(time.time())
self.log.debug("Deleted site: %s" % address)
del(self.sites[address])
# Delete from sites.json
self.save()
# Lazy load sites
def list(self):
if not self.loaded: # Not loaded yet
self.log.debug("Sites not loaded yet...")
self.load(startup=True)
return self.sites
site_manager = SiteManager() # Singleton
if config.action == "main": # Don't connect / add myself to peerlist
peer_blacklist = [("127.0.0.1", config.fileserver_port), ("::1", config.fileserver_port)]
else:
peer_blacklist = []
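# A minimal usage sketch of the lazy-load flow above; the address below is a
# placeholder for illustration only.
from Site import SiteManager

address = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"  # placeholder address
site = SiteManager.site_manager.need(address)  # returns the existing Site or adds it and starts the download
if site:
    print(site.address, site.settings.get("serving"))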

View File

@ -1,631 +0,0 @@
import os
import re
import shutil
import json
import time
import errno
from collections import defaultdict
import sqlite3
import gevent.event
import util
from util import SafeRe
from Db.Db import Db
from Debug import Debug
from Config import config
from util import helper
from util import ThreadPool
from Plugin import PluginManager
from Translate import translate as _
thread_pool_fs_read = ThreadPool.ThreadPool(config.threads_fs_read, name="FS read")
thread_pool_fs_write = ThreadPool.ThreadPool(config.threads_fs_write, name="FS write")
thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch")
@PluginManager.acceptPlugins
class SiteStorage(object):
def __init__(self, site, allow_create=True):
self.site = site
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir
self.log = site.log
self.db = None # Db class
self.db_checked = False # Checked db tables since startup
self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
self.has_db = self.isFile("dbschema.json") # The site has schema
if not os.path.isdir(self.directory):
if allow_create:
os.mkdir(self.directory) # Create directory if not found
else:
raise Exception("Directory not exists: %s" % self.directory)
def getDbFile(self):
if self.db:
return self.db.schema["db_file"]
else:
if self.isFile("dbschema.json"):
schema = self.loadJson("dbschema.json")
return schema["db_file"]
else:
return False
# Create a new database object with the site's schema
def openDb(self, close_idle=False):
schema = self.getDbSchema()
db_path = self.getPath(schema["db_file"])
return Db(schema, db_path, close_idle=close_idle)
def closeDb(self, reason="Unknown (SiteStorage)"):
if self.db:
self.db.close(reason)
self.event_db_busy = None
self.db = None
def getDbSchema(self):
try:
self.site.needFile("dbschema.json")
schema = self.loadJson("dbschema.json")
except Exception as err:
raise Exception("dbschema.json is not a valid JSON: %s" % err)
return schema
def loadDb(self):
self.log.debug("No database, waiting for dbschema.json...")
self.site.needFile("dbschema.json", priority=3)
self.log.debug("Got dbschema.json")
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
if self.has_db:
schema = self.getDbSchema()
db_path = self.getPath(schema["db_file"])
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:
try:
self.rebuildDb(reason="Missing database")
except Exception as err:
self.log.error(err)
if self.db:
self.db.close("Gettig new db for SiteStorage")
self.db = self.openDb(close_idle=True)
try:
changed_tables = self.db.checkTables()
if changed_tables:
self.rebuildDb(delete_db=False, reason="Changed tables") # TODO: only update the changed table datas
except sqlite3.OperationalError:
pass
# Return db class
@util.Noparallel()
def getDb(self):
if self.event_db_busy: # Db not ready for queries
self.log.debug("Wating for db...")
self.event_db_busy.get() # Wait for event
if not self.db:
self.loadDb()
return self.db
def updateDbFile(self, inner_path, file=None, cur=None):
path = self.getPath(inner_path)
if cur:
db = cur.db
else:
db = self.getDb()
return db.updateJson(path, file, cur)
# Return possible db files for the site
@thread_pool_fs_read.wrap
def getDbFiles(self):
found = 0
for content_inner_path, content in self.site.content_manager.contents.items():
# content.json file itself
if self.isFile(content_inner_path):
yield content_inner_path, self.getPath(content_inner_path)
else:
self.log.debug("[MISSING] %s" % content_inner_path)
# Data files in content.json
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"):
continue # We are only interested in JSON files
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
if self.isFile(file_inner_path):
yield file_inner_path, self.getPath(file_inner_path)
else:
self.log.debug("[MISSING] %s" % file_inner_path)
found += 1
if found % 100 == 0:
time.sleep(0.001) # Context switch to avoid UI block
# Rebuild sql cache
@util.Noparallel()
@thread_pool_fs_batch.wrap
def rebuildDb(self, delete_db=True, reason="Unknown"):
self.log.info("Rebuilding db (reason: %s)..." % reason)
self.has_db = self.isFile("dbschema.json")
if not self.has_db:
return False
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path) and delete_db:
if self.db:
self.closeDb("rebuilding") # Close db if open
time.sleep(0.5)
self.log.info("Deleting %s" % db_path)
try:
os.unlink(db_path)
except Exception as err:
self.log.error("Delete error: %s" % err)
if not self.db:
self.db = self.openDb()
self.event_db_busy = gevent.event.AsyncResult()
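# Make concurrent getDb() callers wait until the rebuild finishes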
self.log.info("Rebuild: Creating tables...")
# raise DbTableError if not valid
self.db.checkTables()
cur = self.db.getCursor()
cur.logging = False
s = time.time()
self.log.info("Rebuild: Getting db files...")
db_files = list(self.getDbFiles())
num_imported = 0
num_total = len(db_files)
num_error = 0
self.log.info("Rebuild: Importing data...")
try:
if num_total > 100:
self.site.messageWebsocket(
_["Database rebuilding...<br>Imported {0} of {1} files (error: {2})..."].format(
"0000", num_total, num_error
), "rebuild", 0
)
for file_inner_path, file_path in db_files:
try:
if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur):
num_imported += 1
except Exception as err:
self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err)))
num_error += 1
if num_imported and num_imported % 100 == 0:
self.site.messageWebsocket(
_["Database rebuilding...<br>Imported {0} of {1} files (error: {2})..."].format(
num_imported, num_total, num_error
),
"rebuild", int(float(num_imported) / num_total * 100)
)
time.sleep(0.001) # Context switch to avoid UI block
finally:
cur.close()
if num_total > 100:
self.site.messageWebsocket(
_["Database rebuilding...<br>Imported {0} of {1} files (error: {2})..."].format(
num_imported, num_total, num_error
), "rebuild", 100
)
self.log.info("Rebuild: Imported %s data file in %.3fs" % (num_imported, time.time() - s))
self.event_db_busy.set(True) # Event done, notify waiters
self.event_db_busy = None # Clear event
self.db.commit("Rebuilt")
return True
# Execute sql query or rebuild on dberror
def query(self, query, params=None):
if not query.strip().upper().startswith("SELECT"):
raise Exception("Only SELECT query supported")
try:
res = self.getDb().execute(query, params)
except sqlite3.DatabaseError as err:
if err.__class__.__name__ == "DatabaseError":
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
try:
self.rebuildDb(reason="Query error")
except sqlite3.OperationalError:
pass
res = self.db.cur.execute(query, params)
else:
raise err
return res
def ensureDir(self, inner_path):
try:
os.makedirs(self.getPath(inner_path))
except OSError as err:
if err.errno == errno.EEXIST:
return False
else:
raise err
return True
# Open file object
def open(self, inner_path, mode="rb", create_dirs=False, **kwargs):
file_path = self.getPath(inner_path)
if create_dirs:
file_inner_dir = os.path.dirname(inner_path)
self.ensureDir(file_inner_dir)
return open(file_path, mode, **kwargs)
# Read file content
@thread_pool_fs_read.wrap
def read(self, inner_path, mode="rb"):
return open(self.getPath(inner_path), mode).read()
@thread_pool_fs_write.wrap
def writeThread(self, inner_path, content):
file_path = self.getPath(inner_path)
# Create dir if not exist
self.ensureDir(os.path.dirname(inner_path))
# Write file
if hasattr(content, 'read'): # File-like object
with open(file_path, "wb") as file:
shutil.copyfileobj(content, file) # Write buff to disk
else: # Simple string
if inner_path == "content.json" and os.path.isfile(file_path):
helper.atomicWrite(file_path, content)
else:
with open(file_path, "wb") as file:
file.write(content)
# Write content to file
def write(self, inner_path, content):
self.writeThread(inner_path, content)
self.onUpdated(inner_path)
# Remove file from filesystem
def delete(self, inner_path):
file_path = self.getPath(inner_path)
os.unlink(file_path)
self.onUpdated(inner_path, file=False)
def deleteDir(self, inner_path):
dir_path = self.getPath(inner_path)
os.rmdir(dir_path)
def rename(self, inner_path_before, inner_path_after):
for retry in range(3):
rename_err = None
# To work around "The process cannot access the file because it is being used by another process." error
try:
os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
break
except Exception as err:
rename_err = err
self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
time.sleep(0.1 + retry)
if rename_err:
raise rename_err
# List files from a directory
@thread_pool_fs_read.wrap
def walk(self, dir_inner_path, ignore=None):
directory = self.getPath(dir_inner_path)
for root, dirs, files in os.walk(directory):
root = root.replace("\\", "/")
root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/")
for file_name in files:
if root_relative_path: # Not root dir
file_relative_path = root_relative_path + "/" + file_name
else:
file_relative_path = file_name
if ignore and SafeRe.match(ignore, file_relative_path):
continue
yield file_relative_path
# Don't scan directory that is in the ignore pattern
if ignore:
dirs_filtered = []
for dir_name in dirs:
if root_relative_path:
dir_relative_path = root_relative_path + "/" + dir_name
else:
dir_relative_path = dir_name
if ignore == ".*" or re.match(".*([|(]|^)%s([|)]|$)" % re.escape(dir_relative_path + "/.*"), ignore):
continue
dirs_filtered.append(dir_name)
dirs[:] = dirs_filtered
# List entries in a directory
@thread_pool_fs_read.wrap
def list(self, dir_inner_path):
directory = self.getPath(dir_inner_path)
return os.listdir(directory)
# Site content updated
def onUpdated(self, inner_path, file=None):
# Update Sql cache
should_load_to_db = inner_path.endswith(".json") or inner_path.endswith(".json.gz")
if inner_path == "dbschema.json":
self.has_db = self.isFile("dbschema.json")
# Reopen DB to check changes
if self.has_db:
self.closeDb("New dbschema")
gevent.spawn(self.getDb)
elif not config.disable_db and should_load_to_db and self.has_db: # Load json file to db
if config.verbose:
self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file))
try:
self.updateDbFile(inner_path, file)
except Exception as err:
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
self.closeDb("Json load error")
# Load and parse json file
@thread_pool_fs_read.wrap
def loadJson(self, inner_path):
with self.open(inner_path, "r", encoding="utf8") as file:
return json.load(file)
# Write formatted json file
def writeJson(self, inner_path, data):
# Write to disk
self.write(inner_path, helper.jsonDumps(data).encode("utf8"))
# Get file size
def getSize(self, inner_path):
path = self.getPath(inner_path)
try:
return os.path.getsize(path)
except Exception:
return 0
# File exists
def isFile(self, inner_path):
return os.path.isfile(self.getPath(inner_path))
# File or directory exists
def isExists(self, inner_path):
return os.path.exists(self.getPath(inner_path))
# Dir exists
def isDir(self, inner_path):
return os.path.isdir(self.getPath(inner_path))
# Security check and return path of site's file
def getPath(self, inner_path):
inner_path = inner_path.replace("\\", "/") # Windows separator fix
if not inner_path:
return self.directory
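# Reject directory traversal attempts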
if "../" in inner_path:
raise Exception("File not allowed: %s" % inner_path)
return "%s/%s" % (self.directory, inner_path)
# Get site dir relative path
def getInnerPath(self, path):
if path == self.directory:
inner_path = ""
else:
if path.startswith(self.directory):
inner_path = path[len(self.directory) + 1:]
else:
raise Exception("File not allowed: %s" % path)
return inner_path
# Verify all files sha512sum using content.json
def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True):
bad_files = []
back = defaultdict(int)
back["bad_files"] = bad_files
i = 0
self.log.debug("Verifing files...")
if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first
self.log.debug("VerifyFile content.json not exists")
self.site.needFile("content.json", update=True) # Force update to fix corrupt file
self.site.content_manager.loadContent() # Reload content.json
for content_inner_path, content in list(self.site.content_manager.contents.items()):
back["num_content"] += 1
i += 1
if i % 50 == 0:
time.sleep(0.001) # Context switch to avoid gevent hangs
if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
back["num_content_missing"] += 1
self.log.debug("[MISSING] %s" % content_inner_path)
bad_files.append(content_inner_path)
for file_relative_path in list(content.get("files", {}).keys()):
back["num_file"] += 1
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
back["num_file_missing"] += 1
self.log.debug("[MISSING] %s" % file_inner_path)
bad_files.append(file_inner_path)
continue
if quick_check:
ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
if not ok:
err = "Invalid size"
else:
try:
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
except Exception as err:
ok = False
if not ok:
back["num_file_invalid"] += 1
self.log.debug("[INVALID] %s: %s" % (file_inner_path, err))
if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files
bad_files.append(file_inner_path)
# Optional files
optional_added = 0
optional_removed = 0
for file_relative_path in list(content.get("files_optional", {}).keys()):
back["num_optional"] += 1
file_node = content["files_optional"][file_relative_path]
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
hash_id = self.site.content_manager.hashfield.getHashId(file_node["sha512"])
if not os.path.isfile(file_path):
if self.site.content_manager.isDownloaded(file_inner_path, hash_id):
back["num_optional_removed"] += 1
self.log.debug("[OPTIONAL MISSING] %s" % file_inner_path)
self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"])
if add_optional and self.site.isDownloadable(file_inner_path):
self.log.debug("[OPTIONAL ADDING] %s" % file_inner_path)
bad_files.append(file_inner_path)
continue
if quick_check:
ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"]
else:
try:
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
except Exception as err:
ok = False
if ok:
if not self.site.content_manager.isDownloaded(file_inner_path, hash_id):
back["num_optional_added"] += 1
self.site.content_manager.optionalDownloaded(file_inner_path, hash_id, file_node["size"])
optional_added += 1
self.log.debug("[OPTIONAL FOUND] %s" % file_inner_path)
else:
if self.site.content_manager.isDownloaded(file_inner_path, hash_id):
back["num_optional_removed"] += 1
self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"])
optional_removed += 1
bad_files.append(file_inner_path)
self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)
if config.verbose:
self.log.debug(
"%s verified: %s, quick: %s, optionals: +%s -%s" %
(content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed)
)
self.site.content_manager.contents.db.processDelayed()
time.sleep(0.001) # Context switch to avoid gevent hangs
return back
# Check and try to fix site files integrity
def updateBadFiles(self, quick_check=True):
s = time.time()
res = self.verifyFiles(
quick_check,
add_optional=True,
add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned
)
bad_files = res["bad_files"]
self.site.bad_files = {}
if bad_files:
for bad_file in bad_files:
self.site.bad_files[bad_file] = 1
self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check))
# Delete all of the site's files
@thread_pool_fs_batch.wrap
def deleteFiles(self):
site_title = self.site.content_manager.contents.get("content.json", {}).get("title", self.site.address)
message_id = "delete-%s" % self.site.address
self.log.debug("Deleting files from content.json (title: %s)..." % site_title)
files = [] # Get filenames
content_inner_paths = list(self.site.content_manager.contents.keys())
for i, content_inner_path in enumerate(content_inner_paths):
content = self.site.content_manager.contents.get(content_inner_path, {})
files.append(content_inner_path)
# Add normal files
for file_relative_path in list(content.get("files", {}).keys()):
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
# Add optional files
for file_relative_path in list(content.get("files_optional", {}).keys()):
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
if i % 100 == 0:
num_files = len(files)
self.site.messageWebsocket(
_("Deleting site <b>{site_title}</b>...<br>Collected {num_files} files"),
message_id, (i / len(content_inner_paths)) * 25
)
if self.isFile("dbschema.json"):
self.log.debug("Deleting db file...")
self.closeDb("Deleting site")
self.has_db = False
try:
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path):
os.unlink(db_path)
except Exception as err:
self.log.error("Db file delete error: %s" % err)
num_files = len(files)
for i, inner_path in enumerate(files):
path = self.getPath(inner_path)
if os.path.isfile(path):
for retry in range(5):
try:
os.unlink(path)
break
except Exception as err:
self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry))
time.sleep(float(retry) / 10)
if i % 100 == 0:
self.site.messageWebsocket(
_("Deleting site <b>{site_title}</b>...<br>Deleting file {i}/{num_files}"),
message_id, 25 + (i / num_files) * 50
)
self.onUpdated(inner_path, False)
self.log.debug("Deleting empty dirs...")
i = 0
for root, dirs, files in os.walk(self.directory, topdown=False):
for dir in dirs:
path = os.path.join(root, dir)
if os.path.isdir(path):
try:
i += 1
if i % 100 == 0:
self.site.messageWebsocket(
_("Deleting site <b>{site_title}</b>...<br>Deleting empty directories {i}"),
message_id, 85
)
os.rmdir(path)
except OSError: # Not empty
pass
if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
os.rmdir(self.directory) # Remove sites directory if empty
if os.path.isdir(self.directory):
self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
self.site.messageWebsocket(
_("Deleting site <b>{site_title}</b>...<br>Site deleted, but some unknown files left in the directory"),
message_id, 100
)
return False # Some files not deleted
else:
self.log.debug("Site %s data directory deleted: %s..." % (site_title, self.directory))
self.site.messageWebsocket(
_("Deleting site <b>{site_title}</b>...<br>All files deleted successfully"),
message_id, 100
)
return True # All clean
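# A minimal sketch of the read-only query path above, assuming the site's
# dbschema.json maps its JSON files into a "keyvalue" table (the table and
# parameter names here are placeholders):
storage = site.storage  # SiteStorage instance of a loaded Site
for row in storage.query("SELECT * FROM keyvalue WHERE json_id = :json_id", {"json_id": 1}):
    print(dict(row))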

View File

@ -1,162 +0,0 @@
#!/usr/bin/python2
from gevent import monkey
monkey.patch_all()
import os
import time
import sys
import socket
import ssl
sys.path.append(os.path.abspath("..")) # Imports relative to src dir
import io
import gevent
from gevent.server import StreamServer
from gevent.pool import Pool
from Config import config
config.parse()
from util import SslPatch
# Server
socks = []
data = os.urandom(1024 * 100)
data += "\n"
def handle(sock_raw, addr):
socks.append(sock_raw)
sock = sock_raw
# sock = ctx.wrap_socket(sock, server_side=True)
# if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
# sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
# certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
# fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
try:
while True:
line = sock.recv(16 * 1024)
if not line:
break
if line == "bye\n":
break
elif line == "gotssl\n":
sock.sendall("yes\n")
sock = gevent.ssl.wrap_socket(
sock_raw, server_side=True, keyfile='../../data/key-rsa.pem', certfile='../../data/cert-rsa.pem',
ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
)
else:
sock.sendall(data)
except Exception as err:
print(err)
try:
sock.shutdown(gevent.socket.SHUT_WR)
sock.close()
except:
pass
socks.remove(sock_raw)
pool = Pool(1000) # do not accept more than 1000 connections
server = StreamServer(('127.0.0.1', 1234), handle)
server.start()
# Client
total_num = 0
total_bytes = 0
cipher = None
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
"!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
def getData():
global total_num, total_bytes, cipher
data = None
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# sock = socket.ssl(s)
# sock = ssl.wrap_socket(sock)
sock.connect(("127.0.0.1", 1234))
# sock.do_handshake()
# clipher = sock.cipher()
sock.send("gotssl\n")
if sock.recv(128) == "yes\n":
sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
sock.do_handshake()
cipher = sock.cipher()
for req in range(20):
sock.sendall("req\n")
buff = io.BytesIO()
data = sock.recv(16 * 1024)
buff.write(data)
if not data:
break
while not data.endswith("\n"):
data = sock.recv(16 * 1024)
if not data:
break
buff.write(data)
total_num += 1
total_bytes += buff.tell()
if not data:
print("No data")
sock.shutdown(gevent.socket.SHUT_WR)
sock.close()
s = time.time()
def info():
import psutil
import os
process = psutil.Process(os.getpid())
if "memory_info" in dir(process):
memory_info = process.memory_info
else:
memory_info = process.get_memory_info
while 1:
print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ')
print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20))
time.sleep(1)
gevent.spawn(info)
for test in range(1):
clients = []
for i in range(500): # Thread
clients.append(gevent.spawn(getData))
gevent.joinall(clients)
print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s)
# Separate client/server process:
# 10*10*100:
# Raw: 10000 req 1000009 kbytes transfered in 5.39999985695
# RSA 2048: 10000 req 1000009 kbytes transfered in 27.7890000343 using ('ECDHE-RSA-AES256-SHA', 'TLSv1/SSLv3', 256)
# ECC: 10000 req 1000009 kbytes transfered in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
# ECC: 10000 req 1000009 kbytes transfered in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
#
# 10*100*10:
# Raw: 10000 req 1000009 kbytes transfered in 7.02700018883 Mem: 14.328125
# RSA 2048: 10000 req 1000009 kbytes transfered in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
# ECC: 10000 req 1000009 kbytes transfered in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
#
# 1*100*100:
# Raw: 10000 req 1000009 kbytes transfered in 4.64400005341 Mem: 14.06640625
# RSA: 10000 req 1000009 kbytes transfered in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
# ECC: 10000 req 1000009 kbytes transfered in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
# AES128: 10000 req 1000009 kbytes transfered in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
# ECC+128: 10000 req 1000009 kbytes transfered in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
#
#
# Single process:
# 1*100*100
# RSA: 10000 req 1000009 kbytes transfered in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
#
# 10*10*100
# RSA: 10000 req 1000009 kbytes transfered in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875

View File

@ -1,23 +0,0 @@
import logging
class Spy:
def __init__(self, obj, func_name):
self.obj = obj
self.__name__ = func_name
self.func_original = getattr(self.obj, func_name)
self.calls = []
def __enter__(self, *args, **kwargs):
logging.debug("Spy started")
def loggedFunc(cls, *args, **kwargs):
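# Record the call: bound object at key 0, positional args keyed from 1, kwargs by name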
call = dict(enumerate(args, 1))
call[0] = cls
call.update(kwargs)
logging.debug("Spy call: %s" % call)
self.calls.append(call)
return self.func_original(cls, *args, **kwargs)
setattr(self.obj, self.__name__, loggedFunc)
return self.calls
def __exit__(self, *args, **kwargs):
setattr(self.obj, self.__name__, self.func_original)
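# A minimal usage sketch; Greeter is a throwaway class for illustration.
class Greeter:
    def hello(self, name):
        return "Hello %s" % name

greeter = Greeter()
with Spy(Greeter, "hello") as calls:
    greeter.hello("World")
assert calls == [{0: greeter, 1: "World"}]  # bound object at key 0, positional args from key 1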

View File

@ -1,59 +0,0 @@
import time
from util import Cached
class CachedObject:
def __init__(self):
self.num_called_add = 0
self.num_called_multiply = 0
self.num_called_none = 0
@Cached(timeout=1)
def calcAdd(self, a, b):
self.num_called_add += 1
return a + b
@Cached(timeout=1)
def calcMultiply(self, a, b):
self.num_called_multiply += 1
return a * b
@Cached(timeout=1)
def none(self):
self.num_called_none += 1
return None
class TestCached:
def testNoneValue(self):
cached_object = CachedObject()
assert cached_object.none() is None
assert cached_object.none() is None
assert cached_object.num_called_none == 1
time.sleep(2)
assert cached_object.none() is None
assert cached_object.num_called_none == 2
def testCall(self):
cached_object = CachedObject()
assert cached_object.calcAdd(1, 2) == 3
assert cached_object.calcAdd(1, 2) == 3
assert cached_object.calcMultiply(1, 2) == 2
assert cached_object.calcMultiply(1, 2) == 2
assert cached_object.num_called_add == 1
assert cached_object.num_called_multiply == 1
assert cached_object.calcAdd(2, 3) == 5
assert cached_object.calcAdd(2, 3) == 5
assert cached_object.num_called_add == 2
assert cached_object.calcAdd(1, 2) == 3
assert cached_object.calcMultiply(2, 3) == 6
assert cached_object.num_called_add == 2
assert cached_object.num_called_multiply == 2
time.sleep(2)
assert cached_object.calcAdd(1, 2) == 3
assert cached_object.num_called_add == 3

View File

@ -1,31 +0,0 @@
import pytest
import Config
@pytest.mark.usefixtures("resetSettings")
class TestConfig:
def testParse(self):
# Defaults
config_test = Config.Config("zeronet.py".split(" "))
config_test.parse(silent=True, parse_config=False)
assert not config_test.debug
assert not config_test.debug_socket
# Test parse command line with unknown parameters (ui_password)
config_test = Config.Config("zeronet.py --debug --debug_socket --ui_password hello".split(" "))
config_test.parse(silent=True, parse_config=False)
assert config_test.debug
assert config_test.debug_socket
with pytest.raises(AttributeError):
config_test.ui_password
# More complex test
args = "zeronet.py --unknown_arg --debug --debug_socket --ui_restrict 127.0.0.1 1.2.3.4 "
args += "--another_unknown argument --use_openssl False siteSign address privatekey --inner_path users/content.json"
config_test = Config.Config(args.split(" "))
config_test.parse(silent=True, parse_config=False)
assert config_test.debug
assert "1.2.3.4" in config_test.ui_restrict
assert not config_test.use_openssl
assert config_test.inner_path == "users/content.json"

View File

@ -1,118 +0,0 @@
import time
import socket
import gevent
import pytest
import mock
from Crypt import CryptConnection
from Connection import ConnectionServer
from Config import config
@pytest.mark.usefixtures("resetSettings")
class TestConnection:
def testIpv6(self, file_server6):
assert ":" in file_server6.ip
client = ConnectionServer(file_server6.ip, 1545)
connection = client.getConnection(file_server6.ip, 1544)
assert connection.ping()
# Close connection
connection.close()
client.stop()
time.sleep(0.01)
assert len(file_server6.connections) == 0
# Should not be able to reach it on an IPv4 address
with pytest.raises(socket.error) as err:
client = ConnectionServer("127.0.0.1", 1545)
connection = client.getConnection("127.0.0.1", 1544)
def testSslConnection(self, file_server):
client = ConnectionServer(file_server.ip, 1545)
assert file_server != client
# Connect to myself
with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips
connection = client.getConnection(file_server.ip, 1544)
assert len(file_server.connections) == 1
assert connection.handshake
assert connection.crypt
# Close connection
connection.close("Test ended")
client.stop()
time.sleep(0.1)
assert len(file_server.connections) == 0
assert file_server.num_incoming == 2 # One for file_server fixture, one for the test
def testRawConnection(self, file_server):
client = ConnectionServer(file_server.ip, 1545)
assert file_server != client
# Remove all supported crypto
crypt_supported_bk = CryptConnection.manager.crypt_supported
CryptConnection.manager.crypt_supported = []
with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips
connection = client.getConnection(file_server.ip, 1544)
assert len(file_server.connections) == 1
assert not connection.crypt
# Close connection
connection.close()
client.stop()
time.sleep(0.01)
assert len(file_server.connections) == 0
# Reset supported crypts
CryptConnection.manager.crypt_supported = crypt_supported_bk
def testPing(self, file_server, site):
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
assert connection.ping()
connection.close()
client.stop()
def testGetConnection(self, file_server):
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
# Get connection by ip/port
connection2 = client.getConnection(file_server.ip, 1544)
assert connection == connection2
# Get connection by peerid
assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False)
connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False)
assert connection2 == connection
connection.close()
client.stop()
def testFloodProtection(self, file_server):
whitelist = file_server.whitelist # Save for reset
file_server.whitelist = [] # Disable 127.0.0.1 whitelist
client = ConnectionServer(file_server.ip, 1545)
# Only allow 6 connections in 1 minute
for reconnect in range(6):
connection = client.getConnection(file_server.ip, 1544)
assert connection.handshake
connection.close()
# The 7th one will time out
with pytest.raises(gevent.Timeout):
with gevent.Timeout(0.1):
connection = client.getConnection(file_server.ip, 1544)
# Reset whitelist
file_server.whitelist = whitelist

View File

@ -1,273 +0,0 @@
import json
import time
import io
import pytest
from Crypt import CryptBitcoin
from Content.ContentManager import VerifyError, SignError
from util.SafeRe import UnsafePatternError
@pytest.mark.usefixtures("resetSettings")
class TestContent:
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
def testInclude(self, site):
# Rules defined in parent content.json
rules = site.content_manager.getRules("data/test_include/content.json")
assert rules["signers"] == ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"] # Valid signer
assert rules["user_name"] == "test" # Extra data
assert rules["max_size"] == 20000 # Max size of files
assert not rules["includes_allowed"] # Don't allow more includes
assert rules["files_allowed"] == "data.json" # Allowed file pattern
# Valid signers for "data/test_include/content.json"
valid_signers = site.content_manager.getValidSigners("data/test_include/content.json")
assert "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" in valid_signers # Extra valid signer defined in parent content.json
assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
assert len(valid_signers) == 2 # No more
# Valid signers for "data/users/content.json"
valid_signers = site.content_manager.getValidSigners("data/users/content.json")
assert "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" in valid_signers # Extra valid signer defined in parent content.json
assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
assert len(valid_signers) == 2
# Valid signers for root content.json
assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
def testIncludeLimits(self, site, crypt_bitcoin_lib):
# Data validation
res = []
data_dict = {
"files": {
"data.json": {
"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
"size": 505
}
},
"modified": time.time()
}
# Normal data
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
data_json = json.dumps(data_dict).encode()
data = io.BytesIO(data_json)
assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
# Reset
del data_dict["signs"]
# Too large
data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
assert "Include too large" in str(err.value)
# Reset
data_dict["files"]["data.json"]["size"] = 505
del data_dict["signs"]
# Not allowed file
data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
assert "File not allowed" in str(err.value)
# Reset
del data_dict["files"]["notallowed.exe"]
del data_dict["signs"]
# Should work again
data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
@pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
def testSign(self, site, inner_path):
# Bad privatekey
with pytest.raises(SignError) as err:
site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
assert "Private key invalid" in str(err.value)
# Good privatekey
content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False)
content_old = site.content_manager.contents[inner_path] # Content before the sign
assert not content_old == content # Timestamp changed
assert site.address in content["signs"] # Used the site's private key to sign
if inner_path == "content.json":
assert len(content["files"]) == 17
elif inner_path == "data/test-include/content.json":
assert len(content["files"]) == 1
elif inner_path == "data/users/content.json":
assert len(content["files"]) == 0
# Everything should be same as before except the modified timestamp and the signs
assert (
{key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
==
{key: val for key, val in content.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
)
def testSignOptionalFiles(self, site):
for hash in list(site.content_manager.hashfield):
site.content_manager.hashfield.remove(hash)
assert len(site.content_manager.hashfield) == 0
site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
del site.content_manager.contents["content.json"]["optional"]
content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern
assert len(content_optional["files_optional"]) > 0
assert len(site.content_manager.hashfield) == len(content_optional["files_optional"]) # Hashed optional files should be added to hashfield
assert len(content_nooptional["files"]) > len(content_optional["files"])
def testFileInfo(self, site):
assert "sha512" in site.content_manager.getFileInfo("index.html")
assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json"
assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json"
assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json"
assert not site.content_manager.getFileInfo("notexist")
# Optional file
file_info_optional = site.content_manager.getFileInfo("data/optional.txt")
assert "sha512" in file_info_optional
assert file_info_optional["optional"] is True
# User content.json that does not exist yet
assert "cert_signers" in site.content_manager.getFileInfo("data/users/unknown/content.json")
# Optional user file
file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert "sha512" in file_info_optional
assert file_info_optional["optional"] is True
def testVerify(self, site, crypt_bitcoin_lib):
inner_path = "data/test_include/content.json"
data_dict = site.storage.loadJson(inner_path)
data = io.BytesIO(json.dumps(data_dict).encode("utf8"))
# Re-sign
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
# Wrong address
data_dict["address"] = "Othersite"
del data_dict["signs"]
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(inner_path, data, ignore_same=False)
assert "Wrong site address" in str(err.value)
# Wrong inner_path
data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
data_dict["inner_path"] = "content.json"
del data_dict["signs"]
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(inner_path, data, ignore_same=False)
assert "Wrong inner_path" in str(err.value)
# Everything right again
data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
data_dict["inner_path"] = inner_path
del data_dict["signs"]
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
def testVerifyInnerPath(self, site, crypt_bitcoin_lib):
inner_path = "content.json"
data_dict = site.storage.loadJson(inner_path)
for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]:
data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}
if "sign" in data_dict:
del data_dict["sign"]
del data_dict["signs"]
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]:
data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}
if "sign" in data_dict:
del data_dict["sign"]
del data_dict["signs"]
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(inner_path, data, ignore_same=False)
assert "Invalid relative path" in str(err.value)
@pytest.mark.parametrize("key", ["ignore", "optional"])
def testSignUnsafePattern(self, site, key):
site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*"
with pytest.raises(UnsafePatternError) as err:
site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False)
assert "Potentially unsafe" in str(err.value)
def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib):
site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*"
with pytest.raises(UnsafePatternError) as err:
with site.storage.open("data/test_include/content.json") as data:
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
assert "Potentially unsafe" in str(err.value)
site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0}
with pytest.raises(UnsafePatternError) as err:
with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data:
site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False)
assert "Potentially unsafe" in str(err.value)
def testPathValidation(self, site):
assert site.content_manager.isValidRelativePath("test.txt")
assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt")
assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt")
assert site.content_manager.isValidRelativePath("тест.текст")
assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆")
# Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names
assert not site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed
assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with /
assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \
assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. in path
assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character
assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters)
assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space
assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot
assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot
assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows
assert not site.content_manager.isValidRelativePath("CON/any.txt")
assert not site.content_manager.isValidRelativePath("any/lpt1.txt")
assert site.content_manager.isValidRelativePath("any/CONAN")
assert not site.content_manager.isValidRelativePath("any/CONOUT$")
assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed

View File

@ -1,390 +0,0 @@
import json
import io
import pytest
from Crypt import CryptBitcoin
from Content.ContentManager import VerifyError, SignError
@pytest.mark.usefixtures("resetSettings")
class TestContentUser:
def testSigners(self, site):
# File info for a not yet existing user file
file_info = site.content_manager.getFileInfo("data/users/notexist/data.json")
assert file_info["content_inner_path"] == "data/users/notexist/content.json"
file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json")
assert file_info["content_inner_path"] == "data/users/notexist/content.json"
valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json")
assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
# Valid signers for an existing user file
valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address
assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json
assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers # The user itself
assert len(valid_signers) == 3 # No more valid signers
# Valid signer for banned user
user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
user_content["cert_user_id"] = "bad@zeroid.bit"
valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address
assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json
assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers # The user itself
def testRules(self, site):
# In this test we are going to manipulate the rules defined in data/users/content.json
user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
# Known user
user_content["cert_auth_type"] = "web"
user_content["cert_user_id"] = "nofish@zeroid.bit"
rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
assert rules["max_size"] == 100000
assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]
# Unknown user
user_content["cert_auth_type"] = "web"
user_content["cert_user_id"] = "noone@zeroid.bit"
rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
assert rules["max_size"] == 10000
assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]
# User with more size limit based on auth type
user_content["cert_auth_type"] = "bitmsg"
user_content["cert_user_id"] = "noone@zeroid.bit"
rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
assert rules["max_size"] == 15000
assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]
# Banned user
user_content["cert_auth_type"] = "web"
user_content["cert_user_id"] = "bad@zeroid.bit"
rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"]
def testRulesAddress(self, site):
user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
user_content = site.storage.loadJson(user_inner_path)
rules = site.content_manager.getRules(user_inner_path, user_content)
assert rules["max_size"] == 10000
assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"]
users_content = site.content_manager.contents["data/users/content.json"]
# Ban user based on address
users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False
rules = site.content_manager.getRules(user_inner_path, user_content)
assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"]
# Change max allowed size
users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
rules = site.content_manager.getRules(user_inner_path, user_content)
assert rules["max_size"] == 20000
def testVerifyAddress(self, site):
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
data_dict = site.storage.loadJson(user_inner_path)
users_content = site.content_manager.contents["data/users/content.json"]
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
# Test error on 15k data.json
data_dict["files"]["data.json"]["size"] = 1024 * 15
del data_dict["signs"] # Remove signs before signing
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
assert "Include too large" in str(err.value)
# Give more space based on address
users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
del data_dict["signs"] # Remove signs before signing
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
def testVerify(self, site):
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
data_dict = site.storage.loadJson(user_inner_path)
users_content = site.content_manager.contents["data/users/content.json"]
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
# Test max size exception by setting allowed to 0
rules = site.content_manager.getRules(user_inner_path, data_dict)
assert rules["max_size"] == 10000
assert users_content["user_contents"]["permission_rules"][".*"]["max_size"] == 10000
users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0
rules = site.content_manager.getRules(user_inner_path, data_dict)
assert rules["max_size"] == 0
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
assert "Include too large" in str(err.value)
users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset
# Test max optional size exception
# 1 MB gif = Allowed
data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024
del data_dict["signs"] # Remove signs before signing
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
# 100 MB gif = Not allowed
data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 100 * 1024 * 1024
del data_dict["signs"] # Remove signs before signing
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
assert "Include optional files too large" in str(err.value)
data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset
# hello.exe = Not allowed
data_dict["files_optional"]["hello.exe"] = data_dict["files_optional"]["peanut-butter-jelly-time.gif"]
del data_dict["signs"] # Remove signs before signing
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
assert "Optional file not allowed" in str(err.value)
del data_dict["files_optional"]["hello.exe"] # Reset
# Includes not allowed in user content
data_dict["includes"] = {"other.json": {}}
del data_dict["signs"] # Remove signs before signing
data_dict["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
}
data = io.BytesIO(json.dumps(data_dict).encode())
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
assert "Includes not allowed" in str(err.value)
def testCert(self, site):
# user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"
user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
# cert_addr = "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"
# Check if the user file is loaded
assert "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" in site.content_manager.contents
user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
rules_content = site.content_manager.contents["data/users/content.json"]
# Override valid cert signers for the test
rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
"14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
"1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
]
# Check valid cert signers
rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
assert rules["cert_signers"] == {"zeroid.bit": [
"14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
"1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
]}
# Sign a valid cert
user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
user_content["cert_auth_type"],
user_content["cert_user_id"].split("@")[0]
), cert_priv)
# Verify cert
assert site.content_manager.verifyCert("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
# The cert should not be valid for another address
assert not site.content_manager.verifyCert("data/users/badaddress/content.json", user_content)
# Sign user content
signed_content = site.content_manager.sign(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
)
# Test user cert
assert site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
# Test banned user
cert_user_id = user_content["cert_user_id"] # My username
site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
assert "Valid signs: 0/1" in str(err.value)
del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset
# Test invalid cert
user_content["cert_sign"] = CryptBitcoin.sign(
"badaddress#%s/%s" % (user_content["cert_auth_type"], user_content["cert_user_id"]), cert_priv
)
signed_content = site.content_manager.sign(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
)
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
assert "Invalid cert" in str(err.value)
# Test banned user, signed by the site owner
user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
user_content["cert_auth_type"],
user_content["cert_user_id"].split("@")[0]
), cert_priv)
cert_user_id = user_content["cert_user_id"] # My username
site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False
site_privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
del user_content["signs"] # Remove signs before signing
user_content["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), site_privatekey)
}
assert site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(user_content).encode()), ignore_same=False
)
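# The cert being signed above follows the pattern
# "<user_site_address>#<cert_auth_type>/<cert_user_name>". A tiny sketch of
# creating such a cert (an illustrative helper, not part of the test suite):
def _makeCert(user_address, auth_type, user_name, cert_privatekey):
    return CryptBitcoin.sign("%s#%s/%s" % (user_address, auth_type, user_name), cert_privatekey)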
def testMissingCert(self, site):
user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"
user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
rules_content = site.content_manager.contents["data/users/content.json"]
# Override valid cert signers for the test
rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
"14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
"1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
]
# Sign a valid cert
user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
user_content["cert_auth_type"],
user_content["cert_user_id"].split("@")[0]
), cert_priv)
signed_content = site.content_manager.sign(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
)
assert site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
# Test invalid cert_user_id
user_content["cert_user_id"] = "nodomain"
user_content["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
}
signed_content = site.content_manager.sign(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
)
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
assert "Invalid domain in cert_user_id" in str(err.value)
# Test removed cert
del user_content["cert_user_id"]
del user_content["cert_auth_type"]
del user_content["signs"] # Remove signs before signing
user_content["signs"] = {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
}
signed_content = site.content_manager.sign(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
)
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
assert "Missing cert_user_id" in str(err.value)
def testCertSignersPattern(self, site):
user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA" # For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet
user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
rules_content = site.content_manager.contents["data/users/content.json"]
# Override valid cert signers for the test
rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]"
# Sign a valid cert
user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
user_content["cert_auth_type"],
"certuser"
), cert_priv)
signed_content = site.content_manager.sign(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
)
assert site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
# Cert does not match the pattern
rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]"
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)
# Removed cert_signers_pattern
del rules_content["user_contents"]["cert_signers_pattern"]
with pytest.raises(VerifyError) as err:
site.content_manager.verifyFile(
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
)
assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)
def testNewFile(self, site):
privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"
site.storage.writeJson(inner_path, {"test": "data"})
site.content_manager.sign(inner_path, privatekey)
assert "test" in site.storage.loadJson(inner_path)
site.storage.delete(inner_path)

View File

@ -1,48 +0,0 @@
from Crypt import CryptBitcoin
class TestCryptBitcoin:
def testSign(self, crypt_bitcoin_lib):
privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C"
privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC"
# Get address by privatekey
address = crypt_bitcoin_lib.privatekeyToAddress(privatekey)
assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"
address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad)
assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"
# Text signing
data_len_list = list(range(0, 300, 10))
data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048]
for data_len in data_len_list:
data = data_len * "!"
sign = crypt_bitcoin_lib.sign(data, privatekey)
assert crypt_bitcoin_lib.verify(data, address, sign)
assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign)
# Signed by bad privatekey
sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad)
assert not crypt_bitcoin_lib.verify("hello", address, sign_bad)
def testVerify(self, crypt_bitcoin_lib):
sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed)
sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed)
def testNewPrivatekey(self):
assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey()
assert CryptBitcoin.privatekeyToAddress(CryptBitcoin.newPrivatekey())
def testNewSeed(self):
assert CryptBitcoin.newSeed() != CryptBitcoin.newSeed()
assert CryptBitcoin.privatekeyToAddress(
CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 0)
)
assert CryptBitcoin.privatekeyToAddress(
CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 2**256)
)
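# A short usage sketch of the same API as tested above, with freshly
# generated keys (illustrative only):
def _signRoundTrip():
    privatekey = CryptBitcoin.newPrivatekey()
    address = CryptBitcoin.privatekeyToAddress(privatekey)
    sign = CryptBitcoin.sign("hello", privatekey)
    assert CryptBitcoin.verify("hello", address, sign)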

View File

@ -1,23 +0,0 @@
import os
from Config import config
from Crypt import CryptConnection
class TestCryptConnection:
def testSslCert(self):
# Remove old certs
if os.path.isfile("%s/cert-rsa.pem" % config.data_dir):
os.unlink("%s/cert-rsa.pem" % config.data_dir)
if os.path.isfile("%s/key-rsa.pem" % config.data_dir):
os.unlink("%s/key-rsa.pem" % config.data_dir)
# Generate certs
CryptConnection.manager.loadCerts()
assert "tls-rsa" in CryptConnection.manager.crypt_supported
assert CryptConnection.manager.selectCrypt(["tls-rsa", "unknown"]) == "tls-rsa" # It should choose the known crypt
# Check openssl cert generation
assert os.path.isfile("%s/cert-rsa.pem" % config.data_dir)
assert os.path.isfile("%s/key-rsa.pem" % config.data_dir)

View File

@ -1,31 +0,0 @@
import base64
from Crypt import CryptHash
sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d"
sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d"
sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3"
class TestCryptBitcoin:
def testSha(self, site):
file_path = site.storage.getPath("dbschema.json")
assert CryptHash.sha512sum(file_path) == sha512t_sum_hex
assert CryptHash.sha512sum(open(file_path, "rb")) == sha512t_sum_hex
assert CryptHash.sha512sum(open(file_path, "rb"), format="digest") == sha512t_sum_bin
assert CryptHash.sha256sum(file_path) == sha256_sum_hex
assert CryptHash.sha256sum(open(file_path, "rb")) == sha256_sum_hex
with open(file_path, "rb") as f:
hash = CryptHash.Sha512t(f.read(100))
assert hash.hexdigest() != sha512t_sum_hex # Partial data should not produce the full hash
hash.update(f.read(1024 * 1024))
assert hash.hexdigest() == sha512t_sum_hex
def testRandom(self):
assert len(CryptHash.random(64)) == 64
assert CryptHash.random() != CryptHash.random()
assert bytes.fromhex(CryptHash.random(encoding="hex"))
assert base64.b64decode(CryptHash.random(encoding="base64"))

View File

@ -1,137 +0,0 @@
import io
class TestDb:
def testCheckTables(self, db):
tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
assert "keyvalue" in tables # To store simple key -> value
assert "json" in tables # Json file path registry
assert "test" in tables # The table defined in dbschema.json
# Verify test table
cols = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
assert "test_id" in cols
assert "title" in cols
# Add new table
assert "newtest" not in tables
db.schema["tables"]["newtest"] = {
"cols": [
["newtest_id", "INTEGER"],
["newtitle", "TEXT"],
],
"indexes": ["CREATE UNIQUE INDEX newtest_id ON newtest(newtest_id)"],
"schema_changed": 1426195822
}
db.checkTables()
tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
assert "test" in tables
assert "newtest" in tables
def testQueries(self, db):
# Test insert
for i in range(100):
db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test #%s" % i})
assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 100
# Test single select
assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1
# Test multiple select
assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"test_id": [1, 2, 3], "title": "Test #2"}
).fetchone()["num"] == 1
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
).fetchone()["num"] == 2
# Test multiple select using named params
assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title",
{"test_id": [1, 2, 3], "title": "Test #2"}
).fetchone()["num"] == 1
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title IN :title",
{"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
).fetchone()["num"] == 2
# Large amount of IN values
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"not__test_id": list(range(2, 3000))}
).fetchone()["num"] == 2
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"test_id": list(range(50, 3000))}
).fetchone()["num"] == 50
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"not__title": ["Test #%s" % i for i in range(50, 3000)]}
).fetchone()["num"] == 50
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"title__like": "%20%"}
).fetchone()["num"] == 1
# Test named parameter escaping
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike",
{"test_id": 1, "titlelike": "Test%"}
).fetchone()["num"] == 1
def testEscaping(self, db):
# Test insert
for i in range(100):
db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i})
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"title": "Test '\" #1"}
).fetchone()["num"] == 1
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"title": ["Test '\" #%s" % i for i in range(0, 50)]}
).fetchone()["num"] == 50
assert db.execute(
"SELECT COUNT(*) AS num FROM test WHERE ?",
{"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]}
).fetchone()["num"] == 50
def testUpdateJson(self, db):
f = io.BytesIO()
f.write("""
{
"test": [
{"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
]
}
""".encode())
f.seek(0)
assert db.updateJson(db.db_dir + "data.json", f) is True
assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1
assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1
def testUnsafePattern(self, db):
db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . supported
f = io.BytesIO()
f.write("""
{
"test": [
{"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
]
}
""")
f.seek(0)
assert db.updateJson(db.db_dir + "data.json", f) is False
assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0
assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0

View File

@ -1,31 +0,0 @@
import re
from Db.DbQuery import DbQuery
class TestDbQuery:
def testParse(self):
query_text = """
SELECT
'comment' AS type,
date_added, post.title AS title,
keyvalue.value || ': ' || comment.body AS body,
'?Post:' || comment.post_id || '#Comments' AS url
FROM
comment
LEFT JOIN json USING (json_id)
LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json')
LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id')
LEFT JOIN post ON (comment.post_id = post.post_id)
WHERE
post.date_added > 123
ORDER BY
date_added DESC
LIMIT 20
"""
query = DbQuery(query_text)
assert query.parts["LIMIT"] == "20"
assert query.fields["body"] == "keyvalue.value || ': ' || comment.body"
assert re.sub("[ \r\n]", "", str(query)) == re.sub("[ \r\n]", "", query_text)
query.wheres.append("body LIKE '%hello%'")
assert "body LIKE '%hello%'" in str(query)

View File

@ -1,52 +0,0 @@
from Debug import Debug
import gevent
import os
import re
import pytest
class TestDebug:
@pytest.mark.parametrize("items,expected", [
(["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]), # basic test
(["@/src/Db/Db.py:17"], ["Db.py line 17"]), # path compression
(["%s:1" % __file__], ["TestDebug.py line 1"]),
(["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]), # plugins
(["@/main.py:17"], ["main.py line 17"]), # root
(["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]), # Windows paths
(["<frozen importlib._bootstrap>:1"], []), # importlib builtins
(["<frozen importlib._bootstrap_external>:1"], []), # importlib builtins
(["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]), # best-effort anonymization
(["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]),
(["/root/main.py:17"], ["/root/main.py line 17"]),
(["{gevent}:13"], ["<gevent>/__init__.py line 13"]), # modules
(["{os}:13"], ["<os> line 13"]), # python builtin modules
(["src/gevent/event.py:17"], ["<gevent>/event.py line 17"]), # gevent-overriden __file__
(["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]), # mutliple args
(["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]), # same file
(["{os}:1", "@/src/Db/Db.py:17"], ["<os> line 1", "Db.py line 17"]), # builtins
(["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["<gevent>/__init__.py line 1", "...", "Db.py line 17"])
])
def testFormatTraceback(self, items, expected):
q_items = []
for item in items:
file, line = item.rsplit(":", 1)
if file.startswith("@"):
file = Debug.root_dir + file[1:]
file = file.replace("{os}", os.__file__)
file = file.replace("{gevent}", gevent.__file__)
q_items.append((file, int(line)))
assert Debug.formatTraceback(q_items) == expected
def testFormatException(self):
try:
raise ValueError("Test exception")
except Exception:
assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException())
try:
os.path.abspath(1)
except Exception:
assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException())
def testFormatStack(self):
assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack())

View File

@ -1,58 +0,0 @@
import io
from util import Diff
class TestDiff:
def testDiff(self):
assert Diff.diff(
[],
["one", "two", "three"]
) == [("+", ["one", "two","three"])]
assert Diff.diff(
["one", "two", "three"],
["one", "two", "three", "four", "five"]
) == [("=", 11), ("+", ["four", "five"])]
assert Diff.diff(
["one", "two", "three", "six"],
["one", "two", "three", "four", "five", "six"]
) == [("=", 11), ("+", ["four", "five"]), ("=", 3)]
assert Diff.diff(
["one", "two", "three", "hmm", "six"],
["one", "two", "three", "four", "five", "six"]
) == [("=", 11), ("-", 3), ("+", ["four", "five"]), ("=", 3)]
assert Diff.diff(
["one", "two", "three"],
[]
) == [("-", 11)]
def testUtf8(self):
assert Diff.diff(
["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"],
["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"]
) == [("=", 20), ("+", ["four", "five"])]
def testDiffLimit(self):
old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
actions = Diff.diff(list(old_f), list(new_f), limit=1024)
assert actions
old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024)
actions = Diff.diff(list(old_f), list(new_f), limit=1024)
assert actions is False
def testPatch(self):
old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
actions = Diff.diff(
list(old_f),
list(new_f)
)
old_f.seek(0)
assert Diff.patch(old_f, actions).getvalue() == new_f.getvalue()
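# The diff actions above use ("=", n) to keep n bytes of the old file,
# ("-", n) to drop n bytes, and ("+", [lines]) to insert new lines. A sketch
# of applying them, assuming that action format:
def _patchSketch(old_f, actions):
    out = io.BytesIO()
    for action, param in actions:
        if action == "=":  # Copy unchanged bytes from the old file
            out.write(old_f.read(param))
        elif action == "-":  # Skip removed bytes
            old_f.read(param)
        else:  # "+": insert the new lines
            for line in param:
                out.write(line)
    return out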

View File

@ -1,65 +0,0 @@
import util
class ExampleClass(object):
def __init__(self):
self.called = []
self.onChanged = util.Event()
def increment(self, title):
self.called.append(title)
class TestEvent:
def testEvent(self):
test_obj = ExampleClass()
test_obj.onChanged.append(lambda: test_obj.increment("Called #1"))
test_obj.onChanged.append(lambda: test_obj.increment("Called #2"))
test_obj.onChanged.once(lambda: test_obj.increment("Once"))
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Called #1", "Called #2", "Once"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Called #1", "Called #2", "Once", "Called #1", "Called #2", "Called #1", "Called #2"]
def testOnce(self):
test_obj = ExampleClass()
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1"))
# It should be called only once
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Once test #1"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Once test #1"]
def testOnceMultiple(self):
test_obj = ExampleClass()
# Allow queuing more than one once() handler
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1"))
test_obj.onChanged.once(lambda: test_obj.increment("Once test #2"))
test_obj.onChanged.once(lambda: test_obj.increment("Once test #3"))
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Once test #1", "Once test #2", "Once test #3"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Once test #1", "Once test #2", "Once test #3"]
def testOnceNamed(self):
test_obj = ExampleClass()
# Don't store more than one handler of the same type
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1/1"), "type 1")
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1/2"), "type 1")
test_obj.onChanged.once(lambda: test_obj.increment("Once test #2"), "type 2")
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Once test #1/1", "Once test #2"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Once test #1/1", "Once test #2"]

View File

@ -1,124 +0,0 @@
import io
import pytest
import time
from Connection import ConnectionServer
from Connection import Connection
from File import FileServer
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestFileRequest:
def testGetFile(self, file_server, site):
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
file_server.sites[site.address] = site
# Normal request
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
assert b"sign" in response["body"]
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
assert b"sign" in response["body"]
# Invalid file
response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
assert "File read error" in response["error"]
# Location over size
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
assert "File read error" in response["error"]
# Stream from parent dir
response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
assert "File read exception" in response["error"]
# Invalid site
response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
assert "Unknown site" in response["error"]
response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
assert "Unknown site" in response["error"]
# Invalid size
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234})
assert "File size does not match" in response["error"]
# Invalid path
for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
for sep in ["/", "\\"]:
response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0})
assert response["error"] == 'File read exception'
connection.close()
client.stop()
def testStreamFile(self, file_server, site):
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
file_server.sites[site.address] = site
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
assert "stream_bytes" in response
assert b"sign" in buff.getvalue()
# Invalid file
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
assert "File read error" in response["error"]
# Location over size
buff = io.BytesIO()
response = connection.request(
"streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
)
assert "File read error" in response["error"]
# Stream from parent dir
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
assert "File read exception" in response["error"]
connection.close()
client.stop()
def testPex(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add new fake peer to site
fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
# Add fake connection to it
fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
fake_peer.connection.last_recv_time = time.time()
assert fake_peer in site.getConnectablePeers()
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert "%s:11337" % file_server.ip_external not in site_temp.peers
assert peer_file_server.pex()
assert "%s:11337" % file_server.ip_external in site_temp.peers
# Should not exchange private peers from local network
fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
fake_peer_private.connection.last_recv_time = time.time()
assert "192.168.0.1:11337" not in site_temp.peers
assert not peer_file_server.pex()
assert "192.168.0.1:11337" not in site_temp.peers
connection.close()
client.stop()

View File

@ -1,39 +0,0 @@
import os
import pytest
from util.Flag import Flag
class TestFlag:
def testFlagging(self):
flag = Flag()
@flag.admin
@flag.no_multiuser
def testFn(anything):
return anything
assert "admin" in flag.db["testFn"]
assert "no_multiuser" in flag.db["testFn"]
def testSubclassedFlagging(self):
flag = Flag()
class Test:
@flag.admin
@flag.no_multiuser
def testFn(anything):
return anything
class SubTest(Test):
pass
assert "admin" in flag.db["testFn"]
assert "no_multiuser" in flag.db["testFn"]
def testInvalidFlag(self):
flag = Flag()
with pytest.raises(Exception) as err:
@flag.no_multiuser
@flag.unknown_flag
def testFn(anything):
return anything
assert "Invalid flag" in str(err.value)

View File

@ -1,79 +0,0 @@
import socket
import struct
import os
import pytest
from util import helper
from Config import config
@pytest.mark.usefixtures("resetSettings")
class TestHelper:
def testShellquote(self):
assert helper.shellquote("hel'lo") == "\"hel'lo\"" # Allow '
assert helper.shellquote('hel"lo') == '"hello"' # Remove "
assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"')
def testPackAddress(self):
for port in [1, 1000, 65535]:
for ip in ["1.1.1.1", "127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]:
assert len(helper.packAddress(ip, port)) == 6
assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port)
for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]:
assert len(helper.packAddress(ip, port)) == 18
assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port)
assert len(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == 12
assert helper.unpackOnionAddress(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == ("boot3rdez4rzn36x.onion", port)
with pytest.raises(struct.error):
helper.packAddress("1.1.1.1", 100000)
with pytest.raises(socket.error):
helper.packAddress("999.1.1.1", 1)
with pytest.raises(Exception):
helper.unpackAddress("X")
def testGetDirname(self):
assert helper.getDirname("data/users/content.json") == "data/users/"
assert helper.getDirname("data/users") == "data/"
assert helper.getDirname("") == ""
assert helper.getDirname("content.json") == ""
assert helper.getDirname("data/users/") == "data/users/"
assert helper.getDirname("/data/users/content.json") == "data/users/"
def testGetFilename(self):
assert helper.getFilename("data/users/content.json") == "content.json"
assert helper.getFilename("data/users") == "users"
assert helper.getFilename("") == ""
assert helper.getFilename("content.json") == "content.json"
assert helper.getFilename("data/users/") == ""
assert helper.getFilename("/data/users/content.json") == "content.json"
def testIsIp(self):
assert helper.isIp("1.2.3.4")
assert helper.isIp("255.255.255.255")
assert not helper.isIp("any.host")
assert not helper.isIp("1.2.3.4.com")
assert not helper.isIp("1.2.3.4.any.host")
def testIsPrivateIp(self):
assert helper.isPrivateIp("192.168.1.1")
assert not helper.isPrivateIp("1.1.1.1")
assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c")
assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns
def testOpenLocked(self):
locked_f = helper.openLocked(config.data_dir + "/locked.file")
assert locked_f
with pytest.raises(BlockingIOError):
locked_f_again = helper.openLocked(config.data_dir + "/locked.file")
locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file")
locked_f.close()
locked_f_different.close()
os.unlink(locked_f.name)
os.unlink(locked_f_different.name)
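# A sketch of a packing scheme consistent with the sizes asserted above
# (an assumption, not necessarily helper's exact code): a 4-byte IPv4 or
# 16-byte IPv6 address followed by a 2-byte port, which also reproduces the
# struct.error on oversized ports and the socket.error on invalid IPs.
import socket
import struct
def _packAddressSketch(ip, port):
    family = socket.AF_INET6 if ":" in ip else socket.AF_INET
    return socket.inet_pton(family, ip) + struct.pack("H", port)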

View File

@ -1,88 +0,0 @@
import io
import os
import msgpack
import pytest
from Config import config
from util import Msgpack
from collections import OrderedDict
class TestMsgpack:
test_data = OrderedDict(
sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items())
)
def testPacking(self):
assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
def testUnpacking(self):
assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data
@pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker])
def testUnpacker(self, unpacker_class):
unpacker = unpacker_class(raw=False)
data = msgpack.packb(self.test_data, use_bin_type=True)
data += msgpack.packb(self.test_data, use_bin_type=True)
messages = []
for char in data:
unpacker.feed(bytes([char]))
for message in unpacker:
messages.append(message)
assert len(messages) == 2
assert messages[0] == self.test_data
assert messages[0] == messages[1]
def testStreaming(self):
bin_data = os.urandom(20)
f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb")
f.read_bytes = 30
data = {"cmd": "response", "body": f, "bin": bin_data}
out_buff = io.BytesIO()
Msgpack.stream(data, out_buff.write)
out_buff.seek(0)
data_packb = {
"cmd": "response",
"body": open("%s/users.json" % config.data_dir, "rb").read(30),
"bin": bin_data
}
out_buff.seek(0)
data_unpacked = Msgpack.unpack(out_buff.read())
assert data_unpacked == data_packb
assert data_unpacked["cmd"] == "response"
assert type(data_unpacked["body"]) == bytes
def testBackwardCompatibility(self):
packed = {}
packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False)
packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True)
for key, val in packed.items():
unpacked = Msgpack.unpack(val)
type(unpacked["utf8"]) == str
type(unpacked["bin"]) == bytes
# Packed with use_bin_type=False (pre-ZeroNet 0.7.0)
unpacked = Msgpack.unpack(packed["py3"], decode=True)
type(unpacked["utf8"]) == str
type(unpacked["bin"]) == bytes
assert len(unpacked["utf8"]) == 9
assert len(unpacked["bin"]) == 10
with pytest.raises(UnicodeDecodeError) as err: # Try to decode binary as utf-8
unpacked = Msgpack.unpack(packed["py3"], decode=False)
# Packed with use_bin_type=True
unpacked = Msgpack.unpack(packed["py3_bin"], decode=False)
type(unpacked["utf8"]) == str
type(unpacked["bin"]) == bytes
assert len(unpacked["utf8"]) == 9
assert len(unpacked["bin"]) == 10

View File

@ -1,167 +0,0 @@
import time
import gevent
import pytest
import util
from util import ThreadPool
@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn'])
def queue_spawn(request):
thread_pool = ThreadPool.ThreadPool(10)
if request.param == "gevent.spawn":
return gevent.spawn
else:
return thread_pool.spawn
class ExampleClass(object):
def __init__(self):
self.counted = 0
@util.Noparallel()
def countBlocking(self, num=5):
for i in range(1, num + 1):
time.sleep(0.1)
self.counted += 1
return "counted:%s" % i
@util.Noparallel(queue=True, ignore_class=True)
def countQueue(self, num=5):
for i in range(1, num + 1):
time.sleep(0.1)
self.counted += 1
return "counted:%s" % i
@util.Noparallel(blocking=False)
def countNoblocking(self, num=5):
for i in range(1, num + 1):
time.sleep(0.01)
self.counted += 1
return "counted:%s" % i
class TestNoparallel:
def testBlocking(self, queue_spawn):
obj1 = ExampleClass()
obj2 = ExampleClass()
# Don't allow a second call while one is running; callers block until it finishes
threads = [
queue_spawn(obj1.countBlocking),
queue_spawn(obj1.countBlocking),
queue_spawn(obj1.countBlocking),
queue_spawn(obj2.countBlocking)
]
assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking already counting, but block until its finishes
gevent.joinall(threads)
assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"]
obj2.countBlocking() # Can be called again, as the previous call has finished
assert obj1.counted == 5
assert obj2.counted == 10
def testNoblocking(self):
obj1 = ExampleClass()
thread1 = obj1.countNoblocking()
thread2 = obj1.countNoblocking() # Ignored
assert obj1.counted == 0
time.sleep(0.1)
assert thread1.value == "counted:5"
assert thread2.value == "counted:5"
assert obj1.counted == 5
obj1.countNoblocking().join() # Allow again and wait until finishes
assert obj1.counted == 10
def testQueue(self, queue_spawn):
obj1 = ExampleClass()
queue_spawn(obj1.countQueue, num=1)
queue_spawn(obj1.countQueue, num=1)
queue_spawn(obj1.countQueue, num=1)
time.sleep(0.3)
assert obj1.counted == 2 # Only one call can wait in the queue, so three calls collapse to two executions
obj2 = ExampleClass()
queue_spawn(obj2.countQueue, num=10)
queue_spawn(obj2.countQueue, num=10)
time.sleep(1.5) # Call 1 finished, call 2 still working
assert 10 < obj2.counted < 20
queue_spawn(obj2.countQueue, num=10)
time.sleep(2.0)
assert obj2.counted == 30
def testQueueOverload(self):
obj1 = ExampleClass()
threads = []
for i in range(1000):
thread = gevent.spawn(obj1.countQueue, num=5)
threads.append(thread)
gevent.joinall(threads)
assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed)
def testIgnoreClass(self, queue_spawn):
obj1 = ExampleClass()
obj2 = ExampleClass()
threads = [
queue_spawn(obj1.countQueue),
queue_spawn(obj1.countQueue),
queue_spawn(obj1.countQueue),
queue_spawn(obj2.countQueue),
queue_spawn(obj2.countQueue)
]
s = time.time()
time.sleep(0.001)
gevent.joinall(threads)
# The queue collapses to 2 executions (each call counts to 5 at 0.1s per count, ~0.5s per call)
assert obj1.counted + obj2.counted == 10
taken = time.time() - s
assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s
def testException(self, queue_spawn):
class MyException(Exception):
pass
@util.Noparallel()
def raiseException():
raise MyException("Test error!")
with pytest.raises(MyException) as err:
raiseException()
assert str(err.value) == "Test error!"
with pytest.raises(MyException) as err:
queue_spawn(raiseException).get()
assert str(err.value) == "Test error!"
def testMultithreadMix(self, queue_spawn):
obj1 = ExampleClass()
with ThreadPool.ThreadPool(10) as thread_pool:
s = time.time()
t1 = queue_spawn(obj1.countBlocking, 5)
time.sleep(0.01)
t2 = thread_pool.spawn(obj1.countBlocking, 5)
time.sleep(0.01)
t3 = thread_pool.spawn(obj1.countBlocking, 5)
time.sleep(0.3)
t4 = gevent.spawn(obj1.countBlocking, 5)
threads = [t1, t2, t3, t4]
for thread in threads:
assert thread.get() == "counted:5"
time_taken = time.time() - s
assert obj1.counted == 5
assert 0.5 < time_taken < 0.7

View File

@ -1,159 +0,0 @@
import time
import io
import pytest
from File import FileServer
from File import FileRequest
from Crypt import CryptHash
from . import Spy
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestPeer:
def testPing(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert peer_file_server.ping() is not None
assert peer_file_server in site_temp.peers.values()
peer_file_server.remove()
assert peer_file_server not in site_temp.peers.values()
connection.close()
client.stop()
def testDownloadFile(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
# Testing streamFile
buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
assert b"sign" in buff.getvalue()
# Testing getFile
buff = peer_file_server.getFile(site_temp.address, "content.json")
assert b"sign" in buff.getvalue()
connection.close()
client.stop()
def testHashfield(self, site):
sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
# Check if hashfield has any files
assert site.content_manager.hashfield
assert len(site.content_manager.hashfield) > 0
# Check existing hash
assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield
# Add new hash
new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
assert site.content_manager.hashfield.appendHash(new_hash)
assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time
assert site.content_manager.hashfield.getHashId(new_hash) in site.content_manager.hashfield
# Remove new hash
assert site.content_manager.hashfield.removeHash(new_hash)
assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
def testHashfieldExchange(self, file_server, site, site_temp):
server1 = file_server
server1.sites[site.address] = site
site.connection_server = server1
server2 = FileServer(file_server.ip, 1545)
server2.sites[site_temp.address] = site_temp
site_temp.connection_server = server2
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
# Add file_server as peer to client
server2_peer1 = site_temp.addPeer(file_server.ip, 1544)
# Check if hashfield has any files
assert len(site.content_manager.hashfield) > 0
# Testing hashfield sync
assert len(server2_peer1.hashfield) == 0
assert server2_peer1.updateHashfield() # Query hashfield from peer
assert len(server2_peer1.hashfield) > 0
# Test force push new hashfield
site_temp.content_manager.hashfield.appendHash("AABB")
server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
with Spy.Spy(FileRequest, "route") as requests:
assert len(server1_peer2.hashfield) == 0
server2_peer1.sendMyHashfield()
assert len(server1_peer2.hashfield) == 1
server2_peer1.sendMyHashfield() # Hashfield not changed, should be ignored
assert len(requests) == 1
time.sleep(0.01) # To make hashfield change date different
site_temp.content_manager.hashfield.appendHash("AACC")
server2_peer1.sendMyHashfield() # Push hashfield
assert len(server1_peer2.hashfield) == 2
assert len(requests) == 2
site_temp.content_manager.hashfield.appendHash("AADD")
assert server1_peer2.updateHashfield(force=True) # Request hashfield
assert len(server1_peer2.hashfield) == 3
assert len(requests) == 3
assert not server2_peer1.sendMyHashfield() # Not changed, should be ignored
assert len(requests) == 3
server2.stop()
def testFindHash(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert peer_file_server.findHashIds([1234]) == {}
# Add fake peers with the required hash
fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
fake_peer_1.hashfield.append(1234)
fake_peer_2 = site.addPeer("1.2.3.5", 1545)
fake_peer_2.hashfield.append(1234)
fake_peer_2.hashfield.append(1235)
fake_peer_3 = site.addPeer("1.2.3.6", 1546)
fake_peer_3.hashfield.append(1235)
fake_peer_3.hashfield.append(1236)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)])
assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
# Test adding my own address
site.content_manager.hashfield.append(1234)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)])
assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])

View File

@ -1,100 +0,0 @@
import time
import gevent
from util import RateLimit
# Time is around limit +/- 0.05 sec
def around(t, limit):
return limit - 0.05 <= t <= limit + 0.05
class ExampleClass(object):
def __init__(self):
self.counted = 0
self.last_called = None
def count(self, back="counted"):
self.counted += 1
self.last_called = back
return back
class TestRateLimit:
def testCall(self):
obj1 = ExampleClass()
obj2 = ExampleClass()
s = time.time()
assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
assert around(time.time() - s, 0.0) # First call is allowed instantly
assert obj1.counted == 1
# Call again
assert not RateLimit.isAllowed("counting", 0.1)
assert RateLimit.isAllowed("something else", 0.1)
assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
assert around(time.time() - s, 0.1) # Delays second call within interval
assert obj1.counted == 2
time.sleep(0.1) # Wait out the cooldown time
# Call 3 times async
s = time.time()
assert obj2.counted == 0
threads = [
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # Instant
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # 0.1s delay
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)) # 0.2s delay
]
gevent.joinall(threads)
assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
assert around(time.time() - s, 0.2)
# Wait 0.1s cooldown
assert not RateLimit.isAllowed("counting", 0.1)
time.sleep(0.11)
assert RateLimit.isAllowed("counting", 0.1)
# No queue = instant again
s = time.time()
assert RateLimit.isAllowed("counting", 0.1)
assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
assert around(time.time() - s, 0.0)
assert obj2.counted == 4
def testCallAsync(self):
obj1 = ExampleClass()
obj2 = ExampleClass()
s = time.time()
RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
assert obj1.counted == 1 # First instant
assert around(time.time() - s, 0.0)
# After that, calls are delayed
s = time.time()
t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2") # Dumped by the next call
time.sleep(0.03)
t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3") # Dumped by the next call
time.sleep(0.03)
t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4") # Will be called
assert obj1.counted == 1 # Delay still in progress: Not called yet
t3.join()
assert t3.value == "call #4"
assert around(time.time() - s, 0.1)
# Only the last one called
assert obj1.counted == 2
assert obj1.last_called == "call #4"
# Just called, not allowed again
assert not RateLimit.isAllowed("counting async", 0.1)
s = time.time()
t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
assert obj1.counted == 3
assert around(time.time() - s, 0.1)
assert not RateLimit.isAllowed("counting async", 0.1)
time.sleep(0.11)
assert RateLimit.isAllowed("counting async", 0.1)
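A condensed usage sketch of the interface these tests pin down (RateLimit.call, RateLimit.isAllowed and RateLimit.callAsync, keyed by an arbitrary string):

import time
from util import RateLimit

def work():
    return time.time()

first = RateLimit.call("sketch", allowed_again=0.1, func=work)   # Runs instantly
second = RateLimit.call("sketch", allowed_again=0.1, func=work)  # Blocks ~0.1s, then runs
assert not RateLimit.isAllowed("sketch", 0.1)  # Just ran, still cooling down

# Async variant returns a greenlet; overlapping pending calls collapse into the last one
thread = RateLimit.callAsync("sketch async", allowed_again=0.1, func=work)
thread.join()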

View File

@ -1,24 +0,0 @@
from util import SafeRe
import pytest
class TestSafeRe:
def testSafeMatch(self):
assert SafeRe.match(
"((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)",
"js/ZeroTalk.coffee"
)
assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json")
@pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"])
def testUnsafeMatch(self, pattern):
with pytest.raises(SafeRe.UnsafePatternError) as err:
SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
assert "Potentially unsafe" in str(err.value)
@pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"])
def testUnsafeRepetition(self, pattern):
with pytest.raises(SafeRe.UnsafePatternError) as err:
SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
assert "More than" in str(err.value)

View File

@ -1,70 +0,0 @@
import shutil
import os
import pytest
from Site import SiteManager
TEST_DATA_PATH = "src/Test/testdata"
@pytest.mark.usefixtures("resetSettings")
class TestSite:
def testClone(self, site):
assert site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
# Remove old files
if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"):
shutil.rmtree(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json")
# Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL
new_site = site.clone(
"159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL", "5JU2p5h3R7B1WrbaEdEDNZR7YHqRLGcjNcqwqVQzX2H4SuNe2ee", address_index=1
)
# Check if clone was successful
assert new_site.address == "159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"
assert new_site.storage.isFile("content.json")
assert new_site.storage.isFile("index.html")
assert new_site.storage.isFile("data/users/content.json")
assert new_site.storage.isFile("data/zeroblog.db")
assert new_site.storage.verifyFiles()["bad_files"] == [] # No bad files allowed
assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog"
# Optional files should be removed
assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0
# Test re-cloning (updating)
# Changes in non-data files should be overwritten
new_site.storage.write("index.html", b"this will be overwritten")
assert new_site.storage.read("index.html") == b"this will be overwritten"
# Changes in data file should be kept after re-cloning
changed_contentjson = new_site.storage.loadJson("content.json")
changed_contentjson["description"] = "Update Description Test"
new_site.storage.writeJson("content.json", changed_contentjson)
changed_data = new_site.storage.loadJson("data/data.json")
changed_data["title"] = "UpdateTest"
new_site.storage.writeJson("data/data.json", changed_data)
# The update should be reflected in the database
assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "UpdateTest"
# Re-clone the site
site.log.debug("Re-cloning")
site.clone("159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
assert new_site.storage.loadJson("data/data.json")["title"] == "UpdateTest"
assert new_site.storage.loadJson("content.json")["description"] == "Update Description Test"
assert new_site.storage.read("index.html") != "this will be overwritten"
# Delete created files
new_site.storage.deleteFiles()
assert not os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
# Delete from site registry
assert new_site.address in SiteManager.site_manager.sites
SiteManager.site_manager.delete(new_site.address)
assert new_site.address not in SiteManager.site_manager.sites

View File

@ -1,562 +0,0 @@
import time
import pytest
import mock
import gevent
import gevent.event
import os
from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site.Site import Site
from . import Spy
@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
def testRename(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.isFile("content.json")
# Rename non-optional file
os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
content = site.storage.loadJson("content.json")
assert "data/img/domain-new.png" in content["files"]
assert "data/img/domain.png" not in content["files"]
assert not site_temp.storage.isFile("data/img/domain-new.png")
assert site_temp.storage.isFile("data/img/domain.png")
settings_before = site_temp.settings
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
assert "data/img/domain-new.png" in content["files"]
assert "data/img/domain.png" not in content["files"]
assert site_temp.storage.isFile("data/img/domain-new.png")
assert not site_temp.storage.isFile("data/img/domain.png")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testRenameOptional(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.settings["optional_downloaded"] == 0
site_temp.needFile("data/optional.txt")
assert site_temp.settings["optional_downloaded"] > 0
settings_before = site_temp.settings
hashfield_before = site_temp.content_manager.hashfield.tobytes()
# Rename optional file
os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)
content = site.storage.loadJson("content.json")
assert "data/optional-new.txt" in content["files_optional"]
assert "data/optional.txt" not in content["files_optional"]
assert not site_temp.storage.isFile("data/optional-new.txt")
assert site_temp.storage.isFile("data/optional.txt")
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
assert "data/optional-new.txt" in content["files_optional"]
assert "data/optional.txt" not in content["files_optional"]
assert site_temp.storage.isFile("data/optional-new.txt")
assert not site_temp.storage.isFile("data/optional.txt")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
assert site_temp.content_manager.hashfield.tobytes() == hashfield_before
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Download normally
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from remote client
assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedBeforeDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Download normally
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from remote client
assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
# Test when connected peer has the optional file
def testOptionalDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
# Download site
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.storage.isFile("data/optional.txt")
assert site.storage.isFile("data/optional.txt")
site_temp.needFile("data/optional.txt")
assert site_temp.storage.isFile("data/optional.txt")
# Optional user file
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
optional_file_info = site_temp.content_manager.getFileInfo(
"data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
)
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
# Test when the connected peer does not have the file, so we ask if it knows someone who does
def testFindOptional(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init full source server (has optional files)
site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
file_server_full = FileServer(file_server.ip, 1546)
site_full.connection_server = file_server_full
def listen():
ConnectionServer.start(file_server_full)
ConnectionServer.listen(file_server_full)
gevent.spawn(listen)
time.sleep(0.001) # Port opening
file_server_full.sites[site_full.address] = site_full # Add site
site_full.storage.verifyFiles(quick_check=True) # Check optional files
site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server
hashfield = site_full_peer.updateHashfield() # Update hashfield
assert len(site_full.content_manager.hashfield) == 8
assert hashfield
assert site_full.storage.isFile("data/optional.txt")
assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert len(site_full_peer.hashfield) == 8
# Remove hashes from source server
for hash in list(site.content_manager.hashfield):
site.content_manager.hashfield.remove(hash)
# Init client server
site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
site_temp.addPeer(file_server.ip, 1544) # Add source server
# Download normal files
site_temp.log.info("Start Downloading site")
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site_temp.storage.isFile("data/optional.txt")
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server doesn't know it has the file
assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source server doesn't know it has the file
assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file
assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"]) # Source full peer on source server has the file
assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # The full source server knows it has the file
assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # The full source server knows it has the file
site_temp.log.info("Request optional files")
with Spy.Spy(FileRequest, "route") as requests:
# Request 2 files at the same time
threads = []
threads.append(site_temp.needFile("data/optional.txt", blocking=False))
threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
gevent.joinall(threads)
assert len([request for request in requests if request[1] == "findHashIds"]) == 1 # findHashIds should be called only once
assert site_temp.storage.isFile("data/optional.txt")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.deleteFiles()
file_server_full.stop()
[connection.close() for connection in file_server.connections]
site_full.content_manager.contents.db.close("FindOptional test end")
def testUpdate(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Don't try to find peers from the net
site.announce = mock.MagicMock(return_value=True)
site_temp.announce = mock.MagicMock(return_value=True)
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len(site_temp.bad_files) == 1
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json", "wb").write(data_new)
assert site.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("data/data.json").read() == data_original
site.log.info("Publish new data.json without patch")
# Publish without patch
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
site.publish()
time.sleep(0.1)
site.log.info("Downloading site")
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
# Close connection to avoid update spam limit
list(site.peers.values())[0].remove()
site.addPeer(file_server.ip, 1545)
list(site_temp.peers.values())[0].ping() # Connect back
time.sleep(0.1)
# Update with patch
data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
event_done = gevent.event.AsyncResult()
site.publish(diffs=diffs)
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testBigUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
content_json = site.storage.loadJson("content.json")
content_json["description"] = "BigZeroBlog" * 1024 * 10
site.storage.writeJson("content.json", content_json)
site.content_manager.loadContent("content.json", force=True)
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json
site.publish(diffs=diffs)
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
assert len(file_requests) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
# Test what happens if the site's content.json is bigger than the site limit
def testHugeContentSiteUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site_temp.settings["size_limit"] = int(20 * 1024 *1024)
site_temp.saveSettings()
# Raise limit size to 20MB on site so it can be signed
site.settings["size_limit"] = int(20 * 1024 *1024)
site.saveSettings()
content_json = site.storage.loadJson("content.json")
content_json["description"] = "PartirUnJour" * 1024 * 1024
site.storage.writeJson("content.json", content_json)
changed, deleted = site.content_manager.loadContent("content.json", force=True)
# Make sure we have 2 different content.json files
assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
# Publish with patch
site.log.info("Publish new content.json bigger than 10MB")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # Verify it is over 10MB
time.sleep(0.1)
site.publish(diffs=diffs)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
def testUnicodeFilename(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site.storage.write("data/img/árvíztűrő.png", b"test")
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
content = site.storage.loadJson("content.json")
assert "data/img/árvíztűrő.png" in content["files"]
assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
settings_before = site_temp.settings
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
content = site_temp.storage.loadJson("content.json")
assert "data/img/árvíztűrő.png" in content["files"]
assert site_temp.storage.isFile("data/img/árvíztűrő.png")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
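Nearly every test above uses the same Spy pattern to observe wire traffic; a minimal sketch, assuming only the context-manager interface demonstrated in this file (site set up as in the fixtures above):

from File import FileRequest
from Test import Spy  # Imported as `from . import Spy` inside the test package

with Spy.Spy(FileRequest, "route") as requests:
    site.publish()  # Trigger traffic while the spy records FileRequest.route calls
# Each recorded entry holds the call arguments; index 1 is the action name
file_requests = [req for req in requests if req[1] in ("getFile", "streamFile")]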

View File

@ -1,25 +0,0 @@
import pytest
@pytest.mark.usefixtures("resetSettings")
class TestSiteStorage:
def testWalk(self, site):
# Rootdir
walk_root = list(site.storage.walk(""))
assert "content.json" in walk_root
assert "css/all.css" in walk_root
# Subdir
assert list(site.storage.walk("data-default")) == ["data.json", "users/content-default.json"]
def testList(self, site):
# Rootdir
list_root = list(site.storage.list(""))
assert "content.json" in list_root
assert "css/all.css" not in list_root
# Subdir
assert set(site.storage.list("data-default")) == set(["data.json", "users"])
def testDbRebuild(self, site):
assert site.storage.rebuildDb()

View File

@ -1,163 +0,0 @@
import time
import threading
import gevent
import pytest
from util import ThreadPool
class TestThreadPool:
def testExecutionOrder(self):
with ThreadPool.ThreadPool(4) as pool:
events = []
@pool.wrap
def blocker():
events.append("S")
out = 0
for i in range(10000000):
if i == 3000000:
events.append("M")
out += 1
events.append("D")
return out
threads = []
for i in range(3):
threads.append(gevent.spawn(blocker))
gevent.joinall(threads)
assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3
res = blocker()
assert res == 10000000
def testLockBlockingSameThread(self):
lock = ThreadPool.Lock()
s = time.time()
def unlocker():
time.sleep(1)
lock.release()
gevent.spawn(unlocker)
lock.acquire(True)
lock.acquire(True, timeout=2)
unlock_taken = time.time() - s
assert 1.0 < unlock_taken < 1.5
def testLockBlockingDifferentThread(self):
lock = ThreadPool.Lock()
def locker():
lock.acquire(True)
time.sleep(0.5)
lock.release()
with ThreadPool.ThreadPool(10) as pool:
threads = [
pool.spawn(locker),
pool.spawn(locker),
gevent.spawn(locker),
pool.spawn(locker)
]
time.sleep(0.1)
s = time.time()
lock.acquire(True, 5.0)
unlock_taken = time.time() - s
assert 1.8 < unlock_taken < 2.2
gevent.joinall(threads)
def testMainLoopCallerThreadId(self):
main_thread_id = threading.current_thread().ident
with ThreadPool.ThreadPool(5) as pool:
def getThreadId(*args, **kwargs):
return threading.current_thread().ident
t = pool.spawn(getThreadId)
assert t.get() != main_thread_id
t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId))
assert t.get() == main_thread_id
def testMainLoopCallerGeventSpawn(self):
main_thread_id = threading.current_thread().ident
with ThreadPool.ThreadPool(5) as pool:
def waiter():
time.sleep(1)
return threading.current_thread().ident
def geventSpawner():
event = ThreadPool.main_loop.call(gevent.spawn, waiter)
with pytest.raises(Exception) as greenlet_err:
event.get()
assert str(greenlet_err.value) == "cannot switch to a different thread"
waiter_thread_id = ThreadPool.main_loop.call(event.get)
return waiter_thread_id
s = time.time()
waiter_thread_id = pool.apply(geventSpawner)
assert main_thread_id == waiter_thread_id
time_taken = time.time() - s
assert 0.9 < time_taken < 1.2
def testEvent(self):
with ThreadPool.ThreadPool(5) as pool:
event = ThreadPool.Event()
def setter():
time.sleep(1)
event.set("done!")
def getter():
return event.get()
pool.spawn(setter)
t_gevent = gevent.spawn(getter)
t_pool = pool.spawn(getter)
s = time.time()
assert event.get() == "done!"
time_taken = time.time() - s
gevent.joinall([t_gevent, t_pool])
assert t_gevent.get() == "done!"
assert t_pool.get() == "done!"
assert 0.9 < time_taken < 1.2
with pytest.raises(Exception) as err:
event.set("another result")
assert "Event already has value" in str(err.value)
def testMemoryLeak(self):
import gc
thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))]
def worker():
time.sleep(0.1)
return "ok"
def poolTest():
with ThreadPool.ThreadPool(5) as pool:
for i in range(20):
pool.spawn(worker)
for i in range(5):
poolTest()
new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before]
#print("New objs:", new_thread_objs, "run:", num_run)
# Make sure no threadpool object left behind
assert not new_thread_objs
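The main_loop mechanism exercised above, condensed: code on a pool thread can marshal a callable back onto the main gevent loop and get its result. A minimal sketch:

import threading
from util import ThreadPool

main_thread_id = threading.current_thread().ident

with ThreadPool.ThreadPool(2) as pool:
    def onPoolThread():
        assert threading.current_thread().ident != main_thread_id  # Worker thread
        # Run a callable back on the main loop and return its result
        return ThreadPool.main_loop.call(lambda: threading.current_thread().ident)
    assert pool.spawn(onPoolThread).get() == main_thread_id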

View File

@ -1,153 +0,0 @@
import time
import pytest
import mock
from File import FileServer
from Crypt import CryptRsa
from Config import config
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestTor:
def testDownload(self, tor_manager):
for retry in range(15):
time.sleep(1)
if tor_manager.enabled and tor_manager.conn:
break
assert tor_manager.enabled
def testManagerConnection(self, tor_manager):
assert "250-version" in tor_manager.request("GETINFO version")
def testAddOnion(self, tor_manager):
# Add
address = tor_manager.addOnion()
assert address
assert address in tor_manager.privatekeys
# Delete
assert tor_manager.delOnion(address)
assert address not in tor_manager.privatekeys
def testSignOnion(self, tor_manager):
address = tor_manager.addOnion()
# Sign
sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address))
assert len(sign) == 128
# Verify
publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address))
assert len(publickey) == 140
assert CryptRsa.verify(b"hello", publickey, sign)
assert not CryptRsa.verify(b"not hello", publickey, sign)
# Pub to address
assert CryptRsa.publickeyToOnion(publickey) == address
# Delete
tor_manager.delOnion(address)
@pytest.mark.slow
def testConnection(self, tor_manager, file_server, site, site_temp):
file_server.tor_manager.start_onions = True
address = file_server.tor_manager.getOnion(site.address)
assert address
print("Connecting to", address)
for retry in range(5): # Wait for hidden service creation
time.sleep(10)
try:
connection = file_server.getConnection(address + ".onion", 1544)
if connection:
break
except Exception as err:
continue
assert connection.handshake
assert not connection.handshake["peer_id"] # No peer_id for Tor connections
# Return the same connection without site specified
assert file_server.getConnection(address + ".onion", 1544) == connection
# No reuse for different site
assert file_server.getConnection(address + ".onion", 1544, site=site) != connection
assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site)
site_temp.address = "1OTHERSITE"
assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp)
# Only allow queries for the locked site
file_server.sites[site.address] = site
connection_locked = file_server.getConnection(address + ".onion", 1544, site=site)
assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site"
def testPex(self, file_server, site, site_temp):
# Register site to currently running fileserver
site.connection_server = file_server
file_server.sites[site.address] = site
# Create a new file server to emulate new peer connecting to our peer
file_server_temp = FileServer(file_server.ip, 1545)
site_temp.connection_server = file_server_temp
file_server_temp.sites[site_temp.address] = site_temp
# We will request peers from this
peer_source = site_temp.addPeer(file_server.ip, 1544)
# Get ip4 peers from source site
site.addPeer("1.2.3.4", 1555) # Add peer to source site
assert peer_source.pex(need_num=10) == 1
assert len(site_temp.peers) == 2
assert "1.2.3.4:1555" in site_temp.peers
# Get onion peers from source site
site.addPeer("bka4ht2bzxchy44r.onion", 1555)
assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers
# Don't add onion peers if not supported
assert "onion" not in file_server_temp.supported_ip_types
assert peer_source.pex(need_num=10) == 0
file_server_temp.supported_ip_types.append("onion")
assert peer_source.pex(need_num=10) == 1
assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers
def testFindHash(self, tor_manager, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
file_server.sites[site.address] = site
file_server.tor_manager = tor_manager
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert peer_file_server.findHashIds([1234]) == {}
# Add fake peers with the required hash
fake_peer_1 = site.addPeer("bka4ht2bzxchy44r.onion", 1544)
fake_peer_1.hashfield.append(1234)
fake_peer_2 = site.addPeer("1.2.3.5", 1545)
fake_peer_2.hashfield.append(1234)
fake_peer_2.hashfield.append(1235)
fake_peer_3 = site.addPeer("1.2.3.6", 1546)
fake_peer_3.hashfield.append(1235)
fake_peer_3.hashfield.append(1236)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
# Test adding my own address
site.content_manager.hashfield.append(1234)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
def testSiteOnion(self, tor_manager):
with mock.patch.object(config, "tor", "always"):
assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2")
assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1")
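The sign/verify round-trip from testSignOnion, condensed into a standalone sketch; privatekey and address stand in for the values returned by tor_manager.addOnion() / getPrivatekey() above, and the key sizes match the asserts in that test:

from Crypt import CryptRsa

# Assumption: privatekey/address come from tor_manager.addOnion() / getPrivatekey()
sign = CryptRsa.sign(b"hello", privatekey)              # 128-byte RSA signature
publickey = CryptRsa.privatekeyToPublickey(privatekey)  # 140-byte public key
assert CryptRsa.verify(b"hello", publickey, sign)
assert not CryptRsa.verify(b"tampered", publickey, sign)
# The .onion address is derived directly from the public key
assert CryptRsa.publickeyToOnion(publickey) == address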

View File

@ -1,61 +0,0 @@
from Translate import Translate
class TestTranslate:
def testTranslateStrict(self):
translate = Translate()
data = """
translated = _("original")
not_translated = "original"
"""
data_translated = translate.translateData(data, {"_(original)": "translated"})
assert 'translated = _("translated")' in data_translated
assert 'not_translated = "original"' in data_translated
def testTranslateStrictNamed(self):
translate = Translate()
data = """
translated = _("original", "original named")
translated_other = _("original", "original other named")
not_translated = "original"
"""
data_translated = translate.translateData(data, {"_(original, original named)": "translated"})
assert 'translated = _("translated")' in data_translated
assert 'not_translated = "original"' in data_translated
def testTranslateUtf8(self):
translate = Translate()
data = """
greeting = "Hi again árvztűrőtökörfúrógép!"
"""
data_translated = translate.translateData(data, {"Hi again árvztűrőtökörfúrógép!": "Üdv újra árvztűrőtökörfúrógép!"})
assert data_translated == """
greeting = "Üdv újra árvztűrőtökörfúrógép!"
"""
def testTranslateEscape(self):
_ = Translate()
_["Hello"] = "Szia"
# Simple escaping
data = "{_[Hello]} {username}!"
username = "Hacker<script>alert('boom')</script>"
data_translated = _(data)
assert 'Szia' in data_translated
assert '<' not in data_translated
assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
# Escaping dicts
user = {"username": "Hacker<script>alert('boom')</script>"}
data = "{_[Hello]} {user[username]}!"
data_translated = _(data)
assert 'Szia' in data_translated
assert '<' not in data_translated
assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
# Escaping lists
users = [{"username": "Hacker<script>alert('boom')</script>"}]
data = "{_[Hello]} {users[0][username]}!"
data_translated = _(data)
assert 'Szia' in data_translated
assert '<' not in data_translated
assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
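The escaping rule these assertions pin down, in one condensed sketch: values interpolated through a translated format string are HTML-escaped, while the translation lookup itself is substituted verbatim (the tests imply that _() resolves names from the caller's scope):

from Translate import Translate

_ = Translate()
_["Hello"] = "Szia"
username = "<b>Bob</b>"
rendered = _("{_[Hello]} {username}!")
assert rendered == "Szia &lt;b&gt;Bob&lt;/b&gt;!"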

View File

@ -1,11 +0,0 @@
import sys
import pytest
@pytest.mark.usefixtures("resetSettings")
class TestUiWebsocket:
def testPermission(self, ui_websocket):
res = ui_websocket.testAction("ping")
assert res == "pong"
res = ui_websocket.testAction("certList")
assert "You don't have permission" in res["error"]

View File

@ -1,274 +0,0 @@
import socket
from urllib.parse import urlparse
import pytest
import mock
from util import UpnpPunch as upnp
@pytest.fixture
def mock_socket():
mock_socket = mock.MagicMock()
mock_socket.recv = mock.MagicMock(return_value=b'Hello')
mock_socket.bind = mock.MagicMock()
mock_socket.sendto = mock.MagicMock()
return mock_socket
@pytest.fixture
def url_obj():
return urlparse('http://192.168.1.1/ctrlPoint.xml')
@pytest.fixture(params=['WANPPPConnection', 'WANIPConnection'])
def igd_profile(request):
return """<root><serviceList><service>
<serviceType>urn:schemas-upnp-org:service:{}:1</serviceType>
<serviceId>urn:upnp-org:serviceId:wanpppc:pppoa</serviceId>
<controlURL>/upnp/control/wanpppcpppoa</controlURL>
<eventSubURL>/upnp/event/wanpppcpppoa</eventSubURL>
<SCPDURL>/WANPPPConnection.xml</SCPDURL>
</service></serviceList></root>""".format(request.param)
@pytest.fixture
def httplib_response():
class FakeResponse(object):
def __init__(self, status=200, body='OK'):
self.status = status
self.body = body
def read(self):
return self.body
return FakeResponse
class TestUpnpPunch(object):
def test_perform_m_search(self, mock_socket):
local_ip = '127.0.0.1'
with mock.patch('util.UpnpPunch.socket.socket',
return_value=mock_socket):
result = upnp.perform_m_search(local_ip)
assert result == 'Hello'
assert local_ip == mock_socket.bind.call_args_list[0][0][0][0]
assert ('239.255.255.250',
1900) == mock_socket.sendto.call_args_list[0][0][1]
def test_perform_m_search_socket_error(self, mock_socket):
mock_socket.recv.side_effect = socket.error('Timeout error')
with mock.patch('util.UpnpPunch.socket.socket',
return_value=mock_socket):
with pytest.raises(upnp.UpnpError):
upnp.perform_m_search('127.0.0.1')
def test_retrieve_location_from_ssdp(self, url_obj):
ctrl_location = url_obj.geturl()
parsed_location = urlparse(ctrl_location)
rsp = ('auth: gibberish\r\nlocation: {0}\r\n'
'Content-Type: text/html\r\n\r\n').format(ctrl_location)
result = upnp._retrieve_location_from_ssdp(rsp)
assert result == parsed_location
def test_retrieve_location_from_ssdp_no_header(self):
rsp = 'auth: gibberish\r\nContent-Type: application/json\r\n\r\n'
with pytest.raises(upnp.IGDError):
upnp._retrieve_location_from_ssdp(rsp)
def test_retrieve_igd_profile(self, url_obj):
with mock.patch('urllib.request.urlopen') as mock_urlopen:
upnp._retrieve_igd_profile(url_obj)
mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)
def test_retrieve_igd_profile_timeout(self, url_obj):
with mock.patch('urllib.request.urlopen') as mock_urlopen:
mock_urlopen.side_effect = socket.error('Timeout error')
with pytest.raises(upnp.IGDError):
upnp._retrieve_igd_profile(url_obj)
def test_parse_igd_profile_service_type(self, igd_profile):
control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
assert control_path == '/upnp/control/wanpppcpppoa'
assert upnp_schema in ('WANPPPConnection', 'WANIPConnection',)
def test_parse_igd_profile_no_ctrlurl(self, igd_profile):
igd_profile = igd_profile.replace('controlURL', 'nope')
with pytest.raises(upnp.IGDError):
control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
def test_parse_igd_profile_no_schema(self, igd_profile):
igd_profile = igd_profile.replace('Connection', 'nope')
with pytest.raises(upnp.IGDError):
control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
def test_create_open_message_parsable(self):
from xml.parsers.expat import ExpatError
msg, _ = upnp._create_open_message('127.0.0.1', 8888)
try:
upnp.parseString(msg)
except ExpatError as e:
pytest.fail('Incorrect XML message: {}'.format(e))
def test_create_open_message_contains_right_stuff(self):
settings = {'description': 'test desc',
'protocol': 'test proto',
'upnp_schema': 'test schema'}
msg, fn_name = upnp._create_open_message('127.0.0.1', 8888, **settings)
assert fn_name == 'AddPortMapping'
assert '127.0.0.1' in msg
assert '8888' in msg
assert settings['description'] in msg
assert settings['protocol'] in msg
assert settings['upnp_schema'] in msg
def test_parse_for_errors_bad_rsp(self, httplib_response):
rsp = httplib_response(status=500)
with pytest.raises(upnp.IGDError) as err:
upnp._parse_for_errors(rsp)
assert 'Unable to parse' in str(err.value)
def test_parse_for_errors_error(self, httplib_response):
soap_error = ('<document>'
'<errorCode>500</errorCode>'
'<errorDescription>Bad request</errorDescription>'
'</document>')
rsp = httplib_response(status=500, body=soap_error)
with pytest.raises(upnp.IGDError) as err:
upnp._parse_for_errors(rsp)
assert 'SOAP request error' in str(err.value)
def test_parse_for_errors_good_rsp(self, httplib_response):
rsp = httplib_response(status=200)
assert rsp == upnp._parse_for_errors(rsp)
def test_send_requests_success(self):
with mock.patch(
'util.UpnpPunch._send_soap_request') as mock_send_request:
mock_send_request.return_value = mock.MagicMock(status=200)
upnp._send_requests(['msg'], None, None, None)
assert mock_send_request.called
def test_send_requests_failed(self):
with mock.patch(
'util.UpnpPunch._send_soap_request') as mock_send_request:
mock_send_request.return_value = mock.MagicMock(status=500)
with pytest.raises(upnp.UpnpError):
upnp._send_requests(['msg'], None, None, None)
assert mock_send_request.called
def test_collect_idg_data(self):
pass
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_ask_to_open_port_success(self, mock_send_requests,
mock_collect_idg, mock_local_ips):
mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
mock_local_ips.return_value = ['192.168.0.12']
result = upnp.ask_to_open_port(retries=5)
soap_msg = mock_send_requests.call_args[0][0][0][0]
assert result is True
assert mock_collect_idg.called
assert '192.168.0.12' in soap_msg
assert '15441' in soap_msg
assert 'schema-yo' in soap_msg
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_ask_to_open_port_failure(self, mock_send_requests,
mock_collect_idg, mock_local_ips):
mock_local_ips.return_value = ['192.168.0.12']
mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
mock_send_requests.side_effect = upnp.UpnpError()
with pytest.raises(upnp.UpnpError):
upnp.ask_to_open_port()
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_orchestrate_soap_request(self, mock_send_requests,
mock_collect_idg):
soap_mock = mock.MagicMock()
args = ['127.0.0.1', 31337, soap_mock, 'upnp-test', {'upnp_schema':
'schema-yo'}]
mock_collect_idg.return_value = args[-1]
upnp._orchestrate_soap_request(*args[:-1])
assert mock_collect_idg.called
soap_mock.assert_called_with(
*args[:2] + ['upnp-test', 'UDP', 'schema-yo'])
assert mock_send_requests.called
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_orchestrate_soap_request_without_desc(self, mock_send_requests,
mock_collect_idg):
soap_mock = mock.MagicMock()
args = ['127.0.0.1', 31337, soap_mock, {'upnp_schema': 'schema-yo'}]
mock_collect_idg.return_value = args[-1]
upnp._orchestrate_soap_request(*args[:-1])
assert mock_collect_idg.called
soap_mock.assert_called_with(*args[:2] + [None, 'UDP', 'schema-yo'])
assert mock_send_requests.called
def test_create_close_message_parsable(self):
from xml.parsers.expat import ExpatError
msg, _ = upnp._create_close_message('127.0.0.1', 8888)
try:
upnp.parseString(msg)
except ExpatError as e:
pytest.fail('Incorrect XML message: {}'.format(e))
def test_create_close_message_contains_right_stuff(self):
settings = {'protocol': 'test proto',
'upnp_schema': 'test schema'}
msg, fn_name = upnp._create_close_message('127.0.0.1', 8888, **
settings)
assert fn_name == 'DeletePortMapping'
assert '8888' in msg
assert settings['protocol'] in msg
assert settings['upnp_schema'] in msg
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._orchestrate_soap_request')
def test_communicate_with_igd_success(self, mock_orchestrate,
mock_get_local_ips):
mock_get_local_ips.return_value = ['192.168.0.12']
upnp._communicate_with_igd()
assert mock_get_local_ips.called
assert mock_orchestrate.called
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._orchestrate_soap_request')
def test_communicate_with_igd_succeed_despite_single_failure(
self, mock_orchestrate, mock_get_local_ips):
mock_get_local_ips.return_value = ['192.168.0.12']
mock_orchestrate.side_effect = [upnp.UpnpError, None]
upnp._communicate_with_igd(retries=2)
assert mock_get_local_ips.called
assert mock_orchestrate.called
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._orchestrate_soap_request')
def test_communicate_with_igd_total_failure(self, mock_orchestrate,
mock_get_local_ips):
mock_get_local_ips.return_value = ['192.168.0.12']
mock_orchestrate.side_effect = [upnp.UpnpError, upnp.IGDError]
with pytest.raises(upnp.UpnpError):
upnp._communicate_with_igd(retries=2)
assert mock_get_local_ips.called
assert mock_orchestrate.called
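
The retry behaviour pinned down by the three _communicate_with_igd tests above is simple: attempt the SOAP round-trip up to `retries` times and surface the error only once every attempt has failed. A minimal, self-contained sketch of that contract (hypothetical names, not the actual util.UpnpPunch implementation):

class UpnpError(Exception):
    pass

def communicate_with_retries(attempt, retries=3):
    # Call `attempt` up to `retries` times; re-raise only after the last attempt failed.
    last_error = None
    for _ in range(retries):
        try:
            return attempt()
        except UpnpError as err:
            last_error = err
    raise last_error

# Mirrors test_communicate_with_igd_succeed_despite_single_failure above:
outcomes = iter([UpnpError("first attempt failed"), "port opened"])
def flaky():
    outcome = next(outcomes)
    if isinstance(outcome, Exception):
        raise outcome
    return outcome
assert communicate_with_retries(flaky, retries=2) == "port opened"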

View File

@ -1,50 +0,0 @@
import pytest
from Crypt import CryptBitcoin
@pytest.mark.usefixtures("resetSettings")
class TestUser:
def testAddress(self, user):
assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index
# Re-generate the private key from the address_index
def testNewSite(self, user):
address, address_index, site_data = user.getNewSiteData() # Create a new random site
assert CryptBitcoin.hdPrivatekey(user.master_seed, address_index) == site_data["privatekey"]
user.sites = {} # Reset user data
# Site address and auth address are different
assert user.getSiteData(address)["auth_address"] != address
# Re-generate auth_privatekey for site
assert user.getSiteData(address)["auth_privatekey"] == site_data["auth_privatekey"]
def testAuthAddress(self, user):
# Auth address without Cert
auth_address = user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert auth_address == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == auth_address
def testCert(self, user):
cert_auth_address = user.getAuthAddress("1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz") # Add site to user's registry
# Add cert
user.addCert(cert_auth_address, "zeroid.bit", "faketype", "fakeuser", "fakesign")
user.setCert("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr", "zeroid.bit")
# When using a certificate, the auth address should be the same as the certificate provider's
assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == cert_auth_address
# Test delete site data
assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" in user.sites
user.deleteSiteData("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" not in user.sites
# Re-adding the site should generate a normal, unique auth_address
assert not user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"

View File

@ -1,105 +0,0 @@
import urllib.request
import pytest
try:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.expected_conditions import staleness_of, title_is
from selenium.common.exceptions import NoSuchElementException
except ImportError:
pass  # Selenium is optional; the web tests below are skipped without it
class WaitForPageLoad(object):
def __init__(self, browser):
self.browser = browser
def __enter__(self):
self.old_page = self.browser.find_element_by_tag_name('html')
def __exit__(self, *args):
WebDriverWait(self.browser, 10).until(staleness_of(self.old_page))
def getContextUrl(browser):
return browser.execute_script("return window.location.toString()")
def getUrl(url):
content = urllib.request.urlopen(url).read().decode("utf8")  # Decode bytes so the substring checks below work
assert "server error" not in content.lower(), "Got a server error! " + repr(url)
return content
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.webtest
class TestWeb:
def testFileSecurity(self, site_url):
assert "Not Found" in getUrl("%s/media/sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/../config.py" % site_url)
assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
assert "Not Found" in getUrl("%s/raw/sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url)
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
assert "Forbidden" in getUrl("%s/content.db" % site_url)
assert "Forbidden" in getUrl("%s/./users.json" % site_url)
assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url)
assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url)
def testLinkSecurity(self, browser, site_url):
browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url)
WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet"))
assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url
# Switch to inner frame
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
assert "wrapper_nonce" in getContextUrl(browser)
assert browser.find_element_by_id("script_output").text == "Result: Works"
browser.switch_to.default_content()
# Clicking on links without target
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
with WaitForPageLoad(browser):
browser.find_element_by_id("link_to_current").click()
assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content
assert "Forbidden" not in browser.page_source
# Make sure there is no nested iframe inside the frame
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
with pytest.raises(NoSuchElementException):
assert not browser.find_element_by_id("inner-iframe")
browser.switch_to.default_content()
# Clicking on link with target=_top
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
with WaitForPageLoad(browser):
browser.find_element_by_id("link_to_top").click()
assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content
assert "Forbidden" not in browser.page_source
browser.switch_to.default_content()
# Try to escape from inner_frame
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
assert "wrapper_nonce" in getContextUrl(browser) # Make sure we are inside of the inner-iframe
with WaitForPageLoad(browser):
browser.execute_script("window.top.location = window.location")
assert "wrapper_nonce" in getContextUrl(browser) # We try to use nonce-ed html without iframe
assert "<iframe" in browser.page_source # Only allow to use nonce once-time
browser.switch_to.default_content()
def testRaw(self, browser, site_url):
browser.get("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url)
WebDriverWait(browser, 10).until(title_is("Security tests"))
assert getContextUrl(browser) == "%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url
assert browser.find_element_by_id("script_output").text == "Result: Fail"

View File

@ -1,128 +0,0 @@
import pytest
from Worker import WorkerTaskManager
from . import Spy
class TestUiWebsocket:
def checkSort(self, tasks): # Check that the container has the same order as a separately sorted list
tasks_list = list(tasks)
tasks_list.sort(key=lambda task: task["id"])
assert tasks_list != list(tasks)
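# The sort key below means: highest effective priority first, where effective
# priority is priority - workers_num * 10, with ties broken by ascending id.
# E.g. in testAppendSimple: file3 (8 - 0 = 8) outranks file1 (15 - 10 = 5),
# which outranks file2 (1 - 0 = 1).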
tasks_list.sort(key=lambda task: (0 - (task["priority"] - task["workers_num"] * 10), task["id"]))
assert tasks_list == list(tasks)
def testAppendSimple(self):
tasks = WorkerTaskManager.WorkerTaskManager()
tasks.append({"id": 1, "priority": 15, "workers_num": 1, "inner_path": "file1.json"})
tasks.append({"id": 2, "priority": 1, "workers_num": 0, "inner_path": "file2.json"})
tasks.append({"id": 3, "priority": 8, "workers_num": 0, "inner_path": "file3.json"})
assert [task["inner_path"] for task in tasks] == ["file3.json", "file1.json", "file2.json"]
self.checkSort(tasks)
def testAppendMany(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
assert tasks[0]["inner_path"] == "file39.json"
assert tasks[-1]["inner_path"] == "file980.json"
self.checkSort(tasks)
def testRemove(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
i = 333
task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}
assert task in tasks
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.remove(task)
assert len(calls) == 0
assert task not in tasks
# Remove a non-existent item
with Spy.Spy(tasks, "indexSlow") as calls:
with pytest.raises(ValueError):
tasks.remove(task)
assert len(calls) == 0
self.checkSort(tasks)
def testRemoveAll(self):
tasks = WorkerTaskManager.WorkerTaskManager()
tasks_list = []
for i in range(1000):
task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}
tasks.append(task)
tasks_list.append(task)
for task in tasks_list:
tasks.remove(task)
assert len(tasks.inner_paths) == 0
assert len(tasks) == 0
def testModify(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
task = tasks[333]
task["priority"] += 10
with pytest.raises(AssertionError):
self.checkSort(tasks)
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task)
assert len(calls) == 1
assert task in tasks
self.checkSort(tasks)
# Check reorder optimization
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task, "priority", task["priority"] + 10)
assert len(calls) == 0
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task, "priority", task["workers_num"] - 1)
assert len(calls) == 0
self.checkSort(tasks)
def testModifySamePriority(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": 10, "workers_num": 5, "inner_path": "file%s.json" % i})
task = tasks[333]
# Check reorder optimization
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task, "priority", task["workers_num"] - 1)
assert len(calls) == 0
def testIn(self):
tasks = WorkerTaskManager.WorkerTaskManager()
i = 1
task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}
assert task not in tasks
def testFindTask(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
assert tasks.findTask("file999.json")
assert not tasks.findTask("file-unknown.json")
tasks.remove(tasks.findTask("file999.json"))
assert not tasks.findTask("file999.json")

View File

View File

@ -1,497 +0,0 @@
import os
import sys
import urllib.request
import time
import logging
import json
import shutil
import gc
import datetime
import atexit
import threading
import socket
import pytest
import mock
import gevent
if "libev" not in str(gevent.config.loop):
# Workaround for a random crash when libuv is used with threads
gevent.config.loop = "libev-cext"
import gevent.event
from gevent import monkey
monkey.patch_all(thread=False, subprocess=False)
atexit_register = atexit.register
atexit.register = lambda func: "" # Don't register shutdown functions to avoid IO error on exit
def pytest_addoption(parser):
parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests")
def pytest_collection_modifyitems(config, items):
if config.getoption("--slow"):
# --slow given on the command line: do not skip slow tests
return
skip_slow = pytest.mark.skip(reason="need --slow option to run")
for item in items:
if "slow" in item.keywords:
item.add_marker(skip_slow)
# Config
if sys.platform == "win32":
CHROMEDRIVER_PATH = "tools/chrome/chromedriver.exe"
else:
CHROMEDRIVER_PATH = "chromedriver"
SITE_URL = "http://127.0.0.1:43110"
TEST_DATA_PATH = 'src/Test/testdata'
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../lib")) # External modules directory
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/..")) # Imports relative to src dir
from Config import config
config.argv = ["none"]  # Don't pass any argv to the config parser
config.parse(silent=True, parse_config=False) # Plugins need to access the configuration
config.action = "test"
# Load plugins
from Plugin import PluginManager
config.data_dir = TEST_DATA_PATH # Use test data for unittests
config.debug = True
os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../..")) # Set working dir
all_loaded = PluginManager.plugin_manager.loadPlugins()
assert all_loaded, "Not all plugins loaded successfully"
config.loadPlugins()
config.parse(parse_config=False) # Parse again to add plugin configuration options
config.action = "test"
config.debug = True
config.debug_socket = True  # Print socket debug messages
config.verbose = True  # Verbose logging
config.tor = "disable" # Don't start Tor client
config.trackers = []
config.data_dir = TEST_DATA_PATH # Use test data for unittests
if "ZERONET_LOG_DIR" in os.environ:
config.log_dir = os.environ["ZERONET_LOG_DIR"]
config.initLogging(console_logging=False)
# Set a custom formatter with relative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
time_start = time.time()
class TimeFilter(logging.Filter):
def __init__(self, *args, **kwargs):
self.time_last = time.time()
self.main_thread_id = threading.current_thread().ident
super().__init__(*args, **kwargs)
def filter(self, record):
if threading.current_thread().ident != self.main_thread_id:
record.thread_marker = "T"
record.thread_title = "(Thread#%s)" % self.main_thread_id
else:
record.thread_marker = " "
record.thread_title = ""
since_last = time.time() - self.time_last
if since_last > 0.1:
line_marker = "!"
elif since_last > 0.02:
line_marker = "*"
elif since_last > 0.01:
line_marker = "-"
else:
line_marker = " "
since_start = time.time() - time_start
record.since_start = "%s%.3fs" % (line_marker, since_start)
self.time_last = time.time()
return True
log = logging.getLogger()
fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s')
for hndl in log.handlers:
    hndl.addFilter(TimeFilter())
    hndl.setFormatter(fmt)
from Site.Site import Site
from Site import SiteManager
from User import UserManager
from File import FileServer
from Connection import ConnectionServer
from Crypt import CryptConnection
from Crypt import CryptBitcoin
from Ui import UiWebsocket
from Tor import TorManager
from Content import ContentDb
from util import RateLimit
from Db import Db
from Debug import Debug
gevent.get_hub().NOT_ERROR += (Debug.Notify,)
def cleanup():
Db.dbCloseAll()
for dir_path in [config.data_dir, config.data_dir + "-temp"]:
if os.path.isdir(dir_path):
for file_name in os.listdir(dir_path):
ext = file_name.rsplit(".", 1)[-1]
if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]:
continue
file_path = dir_path + "/" + file_name
if os.path.isfile(file_path):
os.unlink(file_path)
atexit_register(cleanup)
@pytest.fixture(scope="session")
def resetSettings(request):
open("%s/sites.json" % config.data_dir, "w").write("{}")
open("%s/filters.json" % config.data_dir, "w").write("{}")
open("%s/users.json" % config.data_dir, "w").write("""
{
"15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
"certs": {},
"master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
"sites": {}
}
}
""")
@pytest.fixture(scope="session")
def resetTempSettings(request):
data_dir_temp = config.data_dir + "-temp"
if not os.path.isdir(data_dir_temp):
os.mkdir(data_dir_temp)
open("%s/sites.json" % data_dir_temp, "w").write("{}")
open("%s/filters.json" % data_dir_temp, "w").write("{}")
open("%s/users.json" % data_dir_temp, "w").write("""
{
"15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
"certs": {},
"master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
"sites": {}
}
}
""")
def cleanup():
os.unlink("%s/sites.json" % data_dir_temp)
os.unlink("%s/users.json" % data_dir_temp)
os.unlink("%s/filters.json" % data_dir_temp)
request.addfinalizer(cleanup)
@pytest.fixture()
def site(request):
threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
# Reset ratelimit
RateLimit.queue_db = {}
RateLimit.called_db = {}
site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
# Always use original data
assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("") # Make sure we dont delete everything
shutil.rmtree(site.storage.getPath(""), True)
shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath(""))
# Add to site manager
SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
def cleanup():
site.delete()
site.content_manager.contents.db.close("Test cleanup")
site.content_manager.contents.db.timer_check_optional.kill()
SiteManager.site_manager.sites.clear()
db_path = "%s/content.db" % config.data_dir
os.unlink(db_path)
del ContentDb.content_dbs[db_path]
gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
request.addfinalizer(cleanup)
site.greenlet_manager.stopGreenlets()
site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files
if not SiteManager.site_manager.sites:
SiteManager.site_manager.sites = {}
SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
site.settings["serving"] = True
return site
@pytest.fixture()
def site_temp(request):
threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
with mock.patch("Config.config.data_dir", config.data_dir + "-temp"):
site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
site_temp.settings["serving"] = True
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
def cleanup():
site_temp.delete()
site_temp.content_manager.contents.db.close("Test cleanup")
site_temp.content_manager.contents.db.timer_check_optional.kill()
db_path = "%s-temp/content.db" % config.data_dir
os.unlink(db_path)
del ContentDb.content_dbs[db_path]
gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
request.addfinalizer(cleanup)
site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short)
return site_temp
@pytest.fixture(scope="session")
def user():
user = UserManager.user_manager.get()
if not user:
user = UserManager.user_manager.create()
user.sites = {} # Reset user data
return user
@pytest.fixture(scope="session")
def browser(request):
try:
from selenium import webdriver
print("Starting chromedriver...")
options = webdriver.chrome.options.Options()
options.add_argument("--headless")
options.add_argument("--window-size=1920x1080")
options.add_argument("--log-level=1")
browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, service_log_path=os.path.devnull, options=options)
def quit():
browser.quit()
request.addfinalizer(quit)
except Exception as err:
raise pytest.skip("Test requires selenium + chromedriver: %s" % err)
return browser
@pytest.fixture(scope="session")
def site_url():
try:
urllib.request.urlopen(SITE_URL).read()
except Exception as err:
raise pytest.skip("Test requires zeronet client running: %s" % err)
return SITE_URL
@pytest.fixture(params=['ipv4', 'ipv6'])
def file_server(request):
if request.param == "ipv4":
return request.getfixturevalue("file_server4")
else:
return request.getfixturevalue("file_server6")
@pytest.fixture
def file_server4(request):
time.sleep(0.1)
file_server = FileServer("127.0.0.1", 1544)
file_server.ip_external = "1.2.3.4" # Fake external ip
def listen():
ConnectionServer.start(file_server)
ConnectionServer.listen(file_server)
gevent.spawn(listen)
# Wait for port opening
for retry in range(10):
time.sleep(0.1) # Port opening
try:
conn = file_server.getConnection("127.0.0.1", 1544)
conn.close()
break
except Exception as err:
print("FileServer6 startup error", Debug.formatException(err))
assert file_server.running
file_server.ip_incoming = {} # Reset flood protection
def stop():
file_server.stop()
request.addfinalizer(stop)
return file_server
@pytest.fixture
def file_server6(request):
try:
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock.connect(("::1", 80, 1, 1))
has_ipv6 = True
except OSError:
has_ipv6 = False
if not has_ipv6:
pytest.skip("Ipv6 not supported")
time.sleep(0.1)
file_server6 = FileServer("::1", 1544)
file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c' # Fake external ip
def listen():
ConnectionServer.start(file_server6)
ConnectionServer.listen(file_server6)
gevent.spawn(listen)
# Wait for port opening
for retry in range(10):
time.sleep(0.1) # Port opening
try:
conn = file_server6.getConnection("::1", 1544)
conn.close()
break
except Exception as err:
print("FileServer6 startup error", Debug.formatException(err))
assert file_server6.running
file_server6.ip_incoming = {} # Reset flood protection
def stop():
file_server6.stop()
request.addfinalizer(stop)
return file_server6
@pytest.fixture()
def ui_websocket(site, user):
class WsMock:
def __init__(self):
self.result = gevent.event.AsyncResult()
def send(self, data):
logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack()))
self.result.set(json.loads(data)["result"])
def getResult(self):
logging.debug("WsMock: Get result")
back = self.result.get()
logging.debug("WsMock: Got result (data: %s)" % back)
self.result = gevent.event.AsyncResult()
return back
ws_mock = WsMock()
ui_websocket = UiWebsocket(ws_mock, site, None, user, None)
def testAction(action, *args, **kwargs):
ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs})
return ui_websocket.ws.getResult()
ui_websocket.testAction = testAction
return ui_websocket
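# Usage sketch (hypothetical command name): a test drives this fixture through the
# injected helper, e.g. result = ui_websocket.testAction("siteInfo"), which routes
# the request through handleRequest() and returns the captured websocket result.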
@pytest.fixture(scope="session")
def tor_manager():
try:
tor_manager = TorManager(fileserver_port=1544)
tor_manager.start()
assert tor_manager.conn is not None
tor_manager.startOnions()
except Exception as err:
raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err))
return tor_manager
@pytest.fixture()
def db(request):
db_path = "%s/zeronet.db" % config.data_dir
schema = {
"db_name": "TestDb",
"db_file": "%s/zeronet.db" % config.data_dir,
"maps": {
"data.json": {
"to_table": [
"test",
{"node": "test", "table": "test_importfilter", "import_cols": ["test_id", "title"]}
]
}
},
"tables": {
"test": {
"cols": [
["test_id", "INTEGER"],
["title", "TEXT"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
"schema_changed": 1426195822
},
"test_importfilter": {
"cols": [
["test_id", "INTEGER"],
["title", "TEXT"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX test_importfilter_id ON test_importfilter(test_id)"],
"schema_changed": 1426195822
}
}
}
if os.path.isfile(db_path):
os.unlink(db_path)
db = Db.Db(schema, db_path)
db.checkTables()
def stop():
db.close("Test db cleanup")
os.unlink(db_path)
request.addfinalizer(stop)
return db
@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"])
def crypt_bitcoin_lib(request, monkeypatch):
monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param)
CryptBitcoin.loadLib(request.param)
return CryptBitcoin
@pytest.fixture(scope='function', autouse=True)
def logCaseStart(request):
global time_start
time_start = time.time()
logging.debug("---- Start test case: %s ----" % request._pyfuncitem)
yield None  # Wait until all tests are done
# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file)
def workaroundPytestLogError():
import _pytest.capture
write_original = _pytest.capture.EncodedFile.write
def write_patched(obj, *args, **kwargs):
try:
write_original(obj, *args, **kwargs)
except ValueError as err:
if str(err) == "I/O operation on closed file":
pass
else:
raise err
def flush_patched(obj, *args, **kwargs):
try:
obj.buffer.flush(*args, **kwargs)
except ValueError as err:
if str(err).startswith("I/O operation on closed file"):
pass
else:
raise err
_pytest.capture.EncodedFile.write = write_patched
_pytest.capture.EncodedFile.flush = flush_patched
workaroundPytestLogError()
@pytest.fixture(scope='session', autouse=True)
def disableLog():
yield None  # Wait until all tests are done
logging.getLogger('').setLevel(logging.CRITICAL)

View File

@ -1,15 +0,0 @@
[run]
branch = True
concurrency = gevent
omit =
src/lib/*
src/Test/*
[report]
exclude_lines =
pragma: no cover
if __name__ == .__main__.:
if config.debug:
if config.debug_socket:
if self.logging:
def __repr__

View File

@ -1,6 +0,0 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6 --no-print-logs --capture=fd
markers =
slow: mark a test as slow.
webtest: mark a test as a webtest.

View File

@ -1,133 +0,0 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"background-color": "white",
"description": "Blogging platform Demo",
"domain": "Blog.ZeroNetwork.bit",
"files": {
"css/all.css": {
"sha512": "65ddd3a2071a0f48c34783aa3b1bde4424bdea344630af05a237557a62bd55dc",
"size": 112710
},
"data-default/data.json": {
"sha512": "3f5c5a220bde41b464ab116cce0bd670dd0b4ff5fe4a73d1dffc4719140038f2",
"size": 196
},
"data-default/users/content-default.json": {
"sha512": "0603ce08f7abb92b3840ad0cf40e95ea0b3ed3511b31524d4d70e88adba83daa",
"size": 679
},
"data/data.json": {
"sha512": "0f2321c905b761a05c360a389e1de149d952b16097c4ccf8310158356e85fb52",
"size": 31126
},
"data/img/autoupdate.png": {
"sha512": "d2b4dc8e0da2861ea051c0c13490a4eccf8933d77383a5b43de447c49d816e71",
"size": 24460
},
"data/img/direct_domains.png": {
"sha512": "5f14b30c1852735ab329b22496b1e2ea751cb04704789443ad73a70587c59719",
"size": 16185
},
"data/img/domain.png": {
"sha512": "ce87e0831f4d1e95a95d7120ca4d33f8273c6fce9f5bbedf7209396ea0b57b6a",
"size": 11881
},
"data/img/memory.png": {
"sha512": "dd56515085b4a79b5809716f76f267ec3a204be3ee0d215591a77bf0f390fa4e",
"size": 12775
},
"data/img/multiuser.png": {
"sha512": "88e3f795f9b86583640867897de6efc14e1aa42f93e848ed1645213e6cc210c6",
"size": 29480
},
"data/img/progressbar.png": {
"sha512": "23d592ae386ce14158cec34d32a3556771725e331c14d5a4905c59e0fe980ebf",
"size": 13294
},
"data/img/slides.png": {
"sha512": "1933db3b90ab93465befa1bd0843babe38173975e306286e08151be9992f767e",
"size": 14439
},
"data/img/slots_memory.png": {
"sha512": "82a250e6da909d7f66341e5b5c443353958f86728cd3f06e988b6441e6847c29",
"size": 9488
},
"data/img/trayicon.png": {
"sha512": "e7ae65bf280f13fb7175c1293dad7d18f1fcb186ebc9e1e33850cdaccb897b8f",
"size": 19040
},
"dbschema.json": {
"sha512": "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d",
"size": 1527
},
"img/loading.gif": {
"sha512": "8a42b98962faea74618113166886be488c09dad10ca47fe97005edc5fb40cc00",
"size": 723
},
"index.html": {
"sha512": "c4039ebfc4cb6f116cac05e803a18644ed70404474a572f0d8473f4572f05df3",
"size": 4667
},
"js/all.js": {
"sha512": "034c97535f3c9b3fbebf2dcf61a38711dae762acf1a99168ae7ddc7e265f582c",
"size": 201178
}
},
"files_optional": {
"data/img/zeroblog-comments.png": {
"sha512": "efe4e815a260e555303e5c49e550a689d27a8361f64667bd4a91dbcccb83d2b4",
"size": 24001
},
"data/img/zeroid.png": {
"sha512": "b46d541a9e51ba2ddc8a49955b7debbc3b45fd13467d3c20ef104e9d938d052b",
"size": 18875
},
"data/img/zeroname.png": {
"sha512": "bab45a1bb2087b64e4f69f756b2ffa5ad39b7fdc48c83609cdde44028a7a155d",
"size": 36031
},
"data/img/zerotalk-mark.png": {
"sha512": "a335b2fedeb8d291ca68d3091f567c180628e80f41de4331a5feb19601d078af",
"size": 44862
},
"data/img/zerotalk-upvote.png": {
"sha512": "b1ffd7f948b4f99248dde7efe256c2efdfd997f7e876fb9734f986ef2b561732",
"size": 41092
},
"data/img/zerotalk.png": {
"sha512": "54d10497a1ffca9a4780092fd1bd158c15f639856d654d2eb33a42f9d8e33cd8",
"size": 26606
},
"data/optional.txt": {
"sha512": "c6f81db0e9f8206c971c9e5826e3ba823ffbb1a3a900f8047652a8bf78ea98fd",
"size": 6
}
},
"ignore": "((js|css)/(?!all.(js|css))|data/.*db|data/users/.*/.*|data/test_include/.*)",
"includes": {
"data/test_include/content.json": {
"added": 1424976057,
"files_allowed": "data.json",
"includes_allowed": false,
"max_size": 20000,
"signers": ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"],
"signers_required": 1,
"user_id": 47,
"user_name": "test"
},
"data/users/content.json": {
"signers": ["1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f"],
"signers_required": 1
}
},
"inner_path": "content.json",
"modified": 1503257990,
"optional": "(data/img/zero.*|data/optional.*)",
"signers_sign": "HDNmWJHM2diYln4pkdL+qYOvgE7MdwayzeG+xEUZBgp1HtOjBJS+knDEVQsBkjcOPicDG2it1r6R1eQrmogqSP0=",
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G4Uq365UBliQG66ygip1jNGYqW6Eh9Mm7nLguDFqAgk/Hksq/ruqMf9rXv78mgUfPBvL2+XgDKYvFDtlykPFZxk="
},
"signs_required": 1,
"title": "ZeroBlog",
"zeronet_version": "0.5.7"
}

File diff suppressed because one or more lines are too long

View File

@ -1,10 +0,0 @@
{
"title": "MyZeroBlog",
"description": "My ZeroBlog.",
"links": "- [Source code](https://github.com/HelloZeroNet)",
"next_post_id": 1,
"demo": false,
"modified": 1432515193,
"post": [
]
}

View File

@ -1,25 +0,0 @@
{
"files": {},
"ignore": ".*",
"modified": 1432466966.003,
"signs": {
"1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8": "HChU28lG4MCnAiui6wDAaVCD4QUrgSy4zZ67+MMHidcUJRkLGnO3j4Eb1N0AWQ86nhSBwoOQf08Rha7gRyTDlAk="
},
"user_contents": {
"cert_signers": {
"zeroid.bit": [ "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz" ]
},
"permission_rules": {
".*": {
"files_allowed": "data.json",
"max_size": 10000
},
"bitid/.*@zeroid.bit": { "max_size": 40000 },
"bitmsg/.*@zeroid.bit": { "max_size": 15000 }
},
"permissions": {
"banexample@zeroid.bit": false,
"nofish@zeroid.bit": { "max_size": 20000 }
}
}
}

View File

@ -1,244 +0,0 @@
{
"title": "ZeroBlog",
"description": "Demo for decentralized, self publishing blogging platform.",
"links": "- [Source code](https://github.com/HelloZeroNet)\n- [Create new blog](?Post:3:How+to+have+a+blog+like+this)",
"next_post_id": 42,
"demo": false,
"modified": 1433033806,
"post": [
{
"post_id": 41,
"title": "Changelog: May 31, 2015",
"date_published": 1433033779.604,
"body": " - rev194\n - Ugly OpenSSL memory leak fix\n - Added Docker and Vargant files (thanks to n3r0-ch)\n\nZeroBlog\n - Comment editing, Deleting, Replying added\n\nNew official site: http://zeronet.io/"
},
{
"post_id": 40,
"title": "Trusted authorization providers",
"date_published": 1432549828.319,
"body": "What is it good for?\n\n - It allows you to have multi-user sites without need of a bot that listen to new user registration requests.\n - You can use the same username across sites\n - The site owner can give you (or revoke) permissions based on your ZeroID username\n\nHow does it works?\n\n - You visit an authorization provider site (eg zeroid.bit)\n - You enter the username you want to register and sent the request to the authorization provider site owner (zeroid supports bitmessage and simple http request).\n - The authorization provider process your request and it he finds everything all right (unique username, other anti-spam methods) he sends you a certificate for the username registration.\n - If a site trust your authorization provider you can post your own content (comments, topics, upvotes, etc.) using this certificate without ever contacting the site owner.\n\nWhat sites currently supports ZeroID?\n\n - You can post comments to ZeroBlog using your ZeroID\n - Later, if everyone is updated to 0.3.0 a new ZeroTalk is also planned that supports ZeroID certificates\n\nWhy is it necessary?\n\n - To have some kind of control over the users of your site. (eg. remove misbehaving users)\n\nOther info\n\n - ZeroID is a standard site, anyone can clone it and have his/her own one\n - You can stop seeding ZeroID site after you got your cert"
},
{
"post_id": 39,
"title": "Changelog: May 25, 2015",
"date_published": 1432511642.167,
"body": "- Version 0.3.0, rev187\n- Trusted authorization provider support: Easier multi-user sites by allowing site owners to define tusted third-party user certificate signers. (more info about it in the next days)\n- `--publish` option to siteSign to publish automatically after the new files signed.\n- `cryptSign` command line command to sign message using private key.\n- New, more stable OpenSSL layer that also works on OSX.\n- New json table format support.\n- DbCursor SELECT parameters bugfix.\n- Faster multi-threaded peer discovery from trackers.\n- New http trackers added.\n- Wait for dbschema.json file to execute query.\n- Handle json import errors.\n- More compact json writeJson storage command output.\n- Workaround to make non target=_top links work.\n- Cleaner UiWebsocket command router.\n- Notify other local users on local file changes.\n- Option to wait file download before execute query.\n- fileRules, certAdd, certSelect, certSet websocket API commands.\n- Allow more file errors on big sites.\n- On stucked downloads skip worker's current file instead of stopping it.\n- NoParallel parameter bugfix.\n- RateLimit interval bugfix.\n- Updater skips non-writeable files.\n- Try to close OpenSSL dll before update.\n\nZeroBlog:\n- Rewritten to use SQL database\n- Commenting on posts (**Please note: The comment publishing and distribution can be slow until most of the clients is not updated to version 0.3.0**)\n\n![comments](data/img/zeroblog-comments.png)\n\nZeroID\n- Sample Trusted authorization provider site with Bitmessage registration support\n\n![comments](data/img/zeroid.png)"
},
{
"post_id": 38,
"title": "Status report: Trusted authorization providers",
"date_published": 1431286381.226,
"body": "Currently working on a new feature that allows to create multi-user sites more easily. For example it will allows us to have comments on ZeroBlog (without contacting the site owner).\n\nCurrent status:\n\n - Sign/verification process: 90%\n - Sample trusted authorization provider site: 70%\n - ZeroBlog modifications: 30%\n - Authorization UI enhacements: 10%\n - Total progress: 60%\n \nEta.: 1-2weeks\n\n### Update: May 18, 2015:\n\nThings left:\n - More ZeroBlog modifications on commenting interface\n - Bitmessage support in Sample trusted authorization provider site\n - Test everything on multiple platform/browser and machine\n - Total progress: 80%\n\nIf no major flaw discovered it should be out this week."
},
{
"post_id": 37,
"title": "Changelog: May 3, 2015",
"date_published": 1430652299.794,
"body": " - rev134\n - Removed ZeroMQ dependencies and support (if you are on pre 0.2.0 version please, upgrade)\n - Save CPU and memory on file requests by streaming content directly to socket without loading to memory and encoding with msgpack.\n - Sites updates without re-download all content.json by querying the modified files from peers.\n - Fix urllib memory leak\n - SiteManager testsuite\n - Fix UiServer security testsuite\n - Announce to tracker on site resume\n\nZeroBoard:\n\n - Only last 100 messages loaded by default\n - Typo fix"
},
{
"post_id": 36,
"title": "Changelog: Apr 29, 2015",
"date_published": 1430388168.315,
"body": " - rev126\n - You can install the \"127.0.0.1:43110-less\" extension from [Chrome Web Store](https://chrome.google.com/webstore/detail/zeronet-protocol/cpkpdcdljfbnepgfejplkhdnopniieop). (thanks to g0ld3nrati0!)\n - You can disable the use of openssl using `--use_openssl False`\n - OpenSSL disabled on OSX because of possible segfault. You can enable it again using `zeronet.py --use_openssl True`,<br> please [give your feedback](https://github.com/HelloZeroNet/ZeroNet/issues/94)!\n - Update on non existent file bugfix\n - Save 20% memory using Python slots\n\n![Memory save](data/img/slots_memory.png)"
},
{
"post_id": 35,
"title": "Changelog: Apr 27, 2015",
"date_published": 1430180561.716,
"body": " - Revision 122\n - 40x faster signature verification by using OpenSSL if available\n - Added OpenSSL benchmark: beat my CPU at http://127.0.0.1:43110/Benchmark :)\n - Fixed UiServer socket memory leak"
},
{
"post_id": 34,
"title": "Slides about ZeroNet",
"date_published": 1430081791.43,
"body": "Topics:\n - ZeroNet cryptography\n - How site downloading works\n - Site updates\n - Multi-user sites\n - Current status of the project / Future plans\n\n<a href=\"https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000&slide=id.g9a1cce9ee_0_4\"><img src=\"data/img/slides.png\"/></a>\n\n[Any feedback is welcome!](http://127.0.0.1:43110/Talk.ZeroNetwork.bit/?Topic:18@2/Presentation+about+how+ZeroNet+works) \n\nThanks! :)"
},
{
"post_id": 33,
"title": "Changelog: Apr 24, 2014",
"date_published": 1429873756.187,
"body": " - Revision 120\n - Batched publishing to avoid update flood: Only send one update in every 7 seconds\n - Protection against update flood by adding update queue: Only allows 1 update in every 10 second for the same file\n - Fix stucked notification icon\n - Fix websocket error when writing to not-owned sites"
},
{
"post_id": 32,
"title": "Changelog: Apr 20, 2014",
"date_published": 1429572874,
"body": " - Revision 115\n - For faster pageload times allow browser cache on css/js/font files\n - Support for experimental chrome extension that allows to browse zeronet sites using `http://talk.zeronetwork.bit` and/or `http://zero/1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F`\n - Allow to browse memory content in /Stats\n - Peers uses Site's logger to save some memory\n - Give not-that-good peers on initial PEX if required\n - Allows more than one `--ui_restrict` ip address\n - Disable ssl monkey patching to avoid ssl error in Debian Jessie\n - Fixed websocket error when writing not-allowed files\n - Fixed bigsite file not found error\n - Fixed loading screen stays on screen even after index.html loaded\n\nZeroHello:\n\n - Site links converted to 127.0.0.1:43110 -less if using chrome extension\n\n![direct domains](data/img/direct_domains.png)"
},
{
"post_id": 31,
"title": "Changelog: Apr 17, 2014",
"date_published": 1429319617.201,
"body": " - Revision 101\n - Revision numbering between version\n - Allow passive publishing\n - Start Zeronet when Windows starts option to system tray icon\n - Add peer ping time to publish timeout\n - Passive connected peers always get the updates\n - Pex count bugfix\n - Changed the topright button hamburger utf8 character to more supported one and removed click anim\n - Passive peers only need 3 connection\n - Passive connection store on tracker bugfix\n - Not exits file bugfix\n - You can compare your computer speed (bitcoin crypto, sha512, sqlite access) to mine: http://127.0.0.1:43110/Benchmark :)\n\nZeroTalk:\n\n - Only quote the last message\n - Message height bugfix\n\nZeroHello:\n\n - Changed the burger icon to more supported one\n - Added revision display"
},
{
"post_id": 30,
"title": "Changelog: Apr 16, 2015",
"date_published": 1429135541.581,
"body": "Apr 15:\n\n - Version 0.2.9\n - To get rid of dead ips only send peers over pex that messaged within 2 hour\n - Only ask peers from 2 sources using pex every 20 min\n - Fixed mysterious notification icon disappearings\n - Mark peers as bad if publish is timed out (5s+)"
},
{
"post_id": 29,
"title": "Changelog: Apr 15, 2015",
"date_published": 1429060414.445,
"body": " - Sexy system tray icon with statistics instead of ugly console. (sorry, Windows only yet)\n - Total sent/received bytes stats\n - Faster connections and publishing by don't send passive peers using PEX and don't store them on trackers\n\n![Tray icon](data/img/trayicon.png)"
},
{
"post_id": 28,
"title": "Changelog: Apr 14, 2015",
"date_published": 1428973199.042,
"body": " - Experimental socks proxy support (Tested using Tor)\n - Tracker-less peer exchange between peers\n - Http bittorrent tracker support\n - Option to disable udp connections (udp tracker)\n - Other stability/security fixes\n\nTo use ZeroNet over Tor network start it with `zeronet.py --proxy 127.0.0.1:9050 --disable_udp`\n\nIt's still an experimental feature, there is lot work/fine tuning needed to make it work better and more secure (eg. by supporting hidden service peer addresses to allow connection between Tor clients). \nIn this mode you can only access to sites where there is at least one peer with peer exchange support. (client updated to latest commit)\n\nIf no more bug found i'm going to tag it as 0.2.9 in the next days."
},
{
"post_id": 27,
"title": "Changelog: Apr 9, 2015",
"date_published": 1428626164.266,
"body": " - Packaged windows dependencies for windows to make it easier to install: [ZeroBundle](https://github.com/HelloZeroNet/ZeroBundle)\n - ZeroName site downloaded at startup, so first .bit domain access is faster.\n - Fixed updater bug. (argh)"
},
{
"post_id": 26,
"title": "Changelog: Apr 7, 2015",
"date_published": 1428454413.286,
"body": " - Fix for big sites confirmation display\n - Total objects in memory stat\n - Memory optimizations\n - Retry bad files in every 20min\n - Load files to db when executing external siteSign command\n - Fix for endless reconnect bug\n \nZeroTalk:\n \n - Added experimental P2P new bot\n - Bumped size limit to 20k for every user :)\n - Reply button\n\nExperimenting/researching possibilities of i2p/tor support (probably using DHT)\n\nAny help/suggestion/idea greatly welcomed: [github issue](https://github.com/HelloZeroNet/ZeroNet/issues/60)"
},
{
"post_id": 25,
"title": "Changelog: Apr 2, 2015",
"date_published": 1428022346.555,
"body": " - Better passive mode by making sure to keep 5 active connections\n - Site connection and msgpack unpacker stats\n - No more sha1 hash added to content.json (it was only for backward compatibility with old clients)\n - Keep connection logger object to prevent some exception\n - Retry upnp port opening 3 times\n - Publish received content updates to more peers to make sure the better distribution\n\nZeroTalk: \n\n - Changed edit icon to more clear pencil\n - Single line breaks also breaks the line"
},
{
"post_id": 24,
"title": "Changelog: Mar 29, 2015",
"date_published": 1427758356.109,
"body": " - Version 0.2.8\n - Namecoin (.bit) domain support!\n - Possible to disable backward compatibility with old version to save some memory\n - Faster content publishing (commenting, posting etc.)\n - Display error on internal server errors\n - Better progress bar\n - Crash and bugfixes\n - Removed coppersurfer tracker (its down atm), added eddie4\n - Sorry, the auto updater broken for this version: please overwrite your current `update.py` file with the [latest one from github](https://raw.githubusercontent.com/HelloZeroNet/ZeroNet/master/update.py), run it and restart ZeroNet.\n - Fixed updater\n\n![domain](data/img/domain.png)\n\nZeroName\n\n - New site for resolving namecoin domains and display registered ones\n\n![ZeroName](data/img/zeroname.png)\nZeroHello\n\n - Automatically links to site's domain names if its specificed in content.json `domain` field\n\n"
},
{
"post_id": 22,
"title": "Changelog: Mar 23, 2015",
"date_published": 1427159576.994,
"body": " - Version 0.2.7\n - Plugin system: Allows extend ZeroNet without modify the core source\n - Comes with 3 plugin:\n - Multiuser: User login/logout based on BIP32 master seed, generate new master seed on visit (disabled by default to enable it just remove the disabled- from the directory name)\n - Stats: /Stats url moved to separate plugin for demonstration reasons\n - DonationMessage: Puts a little donation link to the bottom of every page (disabled by default)\n - Reworked module import system\n - Lazy user auth_address generatation\n - Allow to send prompt dialog to user from server-side\n - Update script remembers plugins enabled/disabled status\n - Multiline notifications\n - Cookie parser\n\nZeroHello in multiuser mode:\n\n - Logout button\n - Identicon generated based on logined user xpub address\n\n![Multiuser](data/img/multiuser.png)"
},
{
"post_id": 21,
"title": "Changelog: Mar 19, 2015",
"date_published": 1426818095.915,
"body": " - Version 0.2.6\n - SQL database support that allows easier site development and faster page load times\n - Updated [ZeroFrame API Reference](http://zeronet.readthedocs.org/en/latest/site_development/zeroframe_api_reference/)\n - Added description of new [dbschema.json](http://zeronet.readthedocs.org/en/latest/site_development/dbschema_json/) file\n - SiteStorage class for file operations\n - Incoming connection firstchar errorfix\n - dbRebuild and dbQuery commandline actions\n - [Goals donation page](http://zeronet.readthedocs.org/en/latest/zeronet_development/donate/)\n\nZeroTalk\n\n - Rewritten to use SQL queries (falls back nicely to use json files on older version)"
},
{
"post_id": 20,
"title": "Changelog: Mar 14, 2015",
"date_published": 1426386779.836,
"body": "\n - Save significant amount of memory by remove unused msgpack unpackers\n - Log unhandled exceptions\n - Connection checker error bugfix\n - Working on database support, you can follow the progress on [reddit](http://www.reddit.com/r/zeronet/comments/2yq7e8/a_json_caching_layer_for_quicker_development_and/)\n\n![memory usage](data/img/memory.png)"
},
{
"post_id": 19,
"title": "Changelog: Mar 10, 2015",
"date_published": 1426041044.008,
"body": " - Fixed ZeroBoard and ZeroTalk registration: It was down last days, sorry, I haven't tested it after recent modifications, but I promise I will from now :)\n - Working hard on documentations, after trying some possibilities, I chosen readthedocs.org: http://zeronet.readthedocs.org\n - The API reference is now up-to-date, documented demo sites working method and also updated other parts\n\n[Please, tell me what you want to see in the docs, Thanks!](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Topic:14@2/New+ZeroNet+documentation)"
},
{
"post_id": 18,
"title": "Changelog: Mar 8, 2015",
"date_published": 1425865493.306,
"body": " - [Better uPnp Puncher](https://github.com/HelloZeroNet/ZeroNet/blob/master/src/util/UpnpPunch.py), if you have problems with port opening please try this.\n\nZeroTalk: \n - Comment upvoting\n - Topic groups, if you know any other article about ZeroNet please, post [here](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Topics:8@2/Articles+about+ZeroNet)"
},
{
"post_id": 17,
"title": "Changelog: Mar 5, 2015",
"date_published": 1425606285.111,
"body": " - Connection pinging and timeout\n - Request timeout\n - Verify content at signing (size, allowed files)\n - Smarter coffeescript recompile\n - More detailed stats\n\nZeroTalk: \n - Topic upvote\n - Even more source code realign\n\n![ZeroTalk upvote](data/img/zerotalk-upvote.png)"
},
{
"post_id": 16,
"title": "Changelog: Mar 1, 2015",
"date_published": 1425259087.503,
"body": "ZeroTalk: \n - Reordered source code to allow more more feature in the future\n - Links starting with http://127.0.0.1:43110/ automatically converted to relative links (proxy support)\n - Comment reply (by clicking on comment's creation date)"
},
{
"post_id": 15,
"title": "Changelog: Feb 25, 2015",
"date_published": 1424913197.035,
"body": " - Version 0.2.5\n - Pure-python upnp port opener (Thanks to sirMackk!)\n - Site download progress bar\n - We are also on [Gitter chat](https://gitter.im/HelloZeroNet/ZeroNet)\n - More detailed connection statistics (ping, buff, idle, delay, sent, received)\n - First char failed bugfix\n - Webebsocket disconnect on slow connection bugfix\n - Faster site update\n\n![Progressbar](data/img/progressbar.png)\n\nZeroTalk: \n\n - Sort after 100ms idle\n - Colored usernames\n - Limit reload rate to 500ms\n\nZeroHello\n\n - [iframe render fps test](/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/render.html) ([more details on ZeroTalk](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Topic:7@2/Slow+rendering+in+Chrome))\n"
},
{
"post_id": 14,
"title": "Changelog: Feb 24, 2015",
"date_published": 1424734437.473,
"body": " - Version 0.2.4\n - New, experimental network code and protocol\n - peerPing and peerGetFile commands\n - Connection share and reuse between sites\n - Don't retry bad file more than 3 times in 20 min\n - Multi-threaded include file download\n - Really shuffle peers before publish\n - Simple internal stats page: http://127.0.0.1:43110/Stats\n - Publish bugfix for sites with more then 10 peers\n\n_If someone on very limited resources its recommended to wait some time until most of the peers is updates to new network code, because the backward compatibility is a little bit tricky and using more memory._"
},
{
"post_id": 13,
"title": "Changelog: Feb 19, 2015",
"date_published": 1424394659.345,
"body": " - Version 0.2.3\n - One click source code download from github, auto unpack and restart \n - Randomize peers before publish and work start\n - Switched to upnpc-shared.exe it has better virustotal reputation (4/53 vs 19/57)\n\n![Autoupdate](data/img/autoupdate.png)\n\nZeroTalk:\n\n - Topics also sorted by topic creation date\n\n_New content and file changes propagation is a bit broken yet. Already working on better network code that also allows passive content publishing. It will be out in 1-2 weeks._"
},
{
"post_id": 12,
"title": "Changelog: Feb 16, 2015",
"date_published": 1424134864.167,
"body": "Feb 16: \n - Version 0.2.2\n - LocalStorage support using WrapperAPI\n - Bugfix in user management\n\nZeroTalk: \n - Topics ordered by date of last post\n - Mark updated topics since your last visit\n\n![Mark](data/img/zerotalk-mark.png)"
},
{
"post_id": 11,
"title": "Changelog: Feb 14, 2015",
"date_published": 1423922572.778,
"body": " - Version 0.2.1\n - Site size limit: Default 10MB, asks permission to store more, test it here: [ZeroNet windows requirement](/1ZeroPYmW4BGwmT6Z54jwPgTWpbKXtTra)\n - Browser open wait until UiServer started\n - Peer numbers stored in sites.json for faster warmup\n - Silent WSGIHandler error\n - siteSetLimit WrapperAPI command\n - Grand ADMIN permission to wrapperframe\n\nZeroHello: \n\n - Site modify time also include sub-file changes (ZeroTalk last comment)\n - Better changetime date format"
},
{
"post_id": 10,
"title": "Changelog: Feb 11, 2015",
"date_published": 1423701015.643,
"body": "ZeroTalk:\n - Link-type posts\n - You can Edit or Delete your previous Comments and Topics\n - [Uploaded source code to github](https://github.com/HelloZeroNet/ZeroTalk)"
},
{
"post_id": 9,
"title": "Changelog: Feb 10, 2015",
"date_published": 1423532194.094,
"body": " - Progressive publish timeout based on file size\n - Better tracker error log\n - Viewport support in content.json and ZeroFrame API to allow better mobile device layout\n - Escape ZeroFrame notification messages to avoid js injection\n - Allow select all data in QueryJson\n\nZeroTalk:\n - Display topic's comment number and last comment time (requires ZeroNet today's commits from github)\n - Mobile device optimized layout"
},
{
"post_id": 8,
"title": "Changelog: Feb 9, 2015",
"date_published": 1423522387.728,
"body": " - Version 0.2.0\n - New bitcoin ECC lib (pybitcointools)\n - Hide notify errors\n - Include support for content.json\n - File permissions (signer address, filesize, allowed filenames)\n - Multisig ready, new, Bitcoincore compatible sign format\n - Faster, multi threaded content publishing\n - Multiuser, ready, BIP32 based site auth using bitcoin address/privatekey\n - Simple json file query language\n - Websocket api fileGet support\n\nZeroTalk: \n - [Decentralized forum demo](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Home)\n - Permission request/username registration\n - Everyone has an own file that he able to modify, sign and publish decentralized way, without contacting the site owner\n - Topic creation\n - Per topic commenting\n\n![ZeroTalk screenshot](data/img/zerotalk.png)"
},
{
"post_id": 7,
"title": "Changelog: Jan 29, 2015",
"date_published": 1422664081.662,
"body": "The default tracker (tracker.pomf.se) is down since yesterday and its resulting some warning messages. To make it disappear please update to latest version from [GitHub](https://github.com/HelloZeroNet/ZeroNet).\n\nZeroNet:\n- Added better tracker error handling\n- Updated alive [trackers list](https://github.com/HelloZeroNet/ZeroNet/blob/master/src/Site/SiteManager.py) (if anyone have more, please [let us know](http://www.reddit.com/r/zeronet/comments/2sgjsp/changelog/co5y07h))\n\nIf you want to stay updated about the project status: <br>\nWe have created a [@HelloZeronet](https://twitter.com/HelloZeroNet) Twitter account"
},
{
"post_id": 6,
"title": "Changelog: Jan 27, 2015",
"date_published": 1422394676.432,
"body": "ZeroNet\n* You can use `start.py` to start zeronet and open in browser automatically\n* Send timeout 50sec (workaround for some strange problems until we rewrite the network code without zeromq)\n* Reworked Websocket API to make it unified and allow named and unnamed parameters\n* Reload `content.json` when changed using fileWrite API command\n* Some typo fix\n\nZeroBlog\n* Allow edit post on mainpage\n* Also change blog title in `content.json` when modified using inline editor\n\nZeroHello\n* Update failed warning changed to No peers found when seeding own site."
},
{
"post_id": 4,
"title": "Changelog: Jan 25, 2015",
"date_published": 1422224700.583,
"body": "ZeroNet\n- Utf-8 site titles fixed\n- Changes in DebugMedia merger to allow faster, node.js based coffeescript compiler\n\nZeroBlog\n- Inline editor rewritten to simple textarea, so copy/paste, undo/redo now working correctly\n- Read more button to folded posts with `---`\n- ZeroBlog running in demo mode, so anyone can try the editing tools\n- Base html tag fixed\n- Markdown cheat-sheet\n- Confirmation if you want to close the browser tab while editing\n\nHow to update your running blog?\n- Backup your `content.json` and `data.json` files\n- Copy the files in the `data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8` directory to your site.\n"
},
{
"post_id": 3,
"title": "How to have a blog like this",
"date_published": 1422140400,
"body": "* Stop ZeroNet\n* Create a new site using `python zeronet.py siteCreate` command\n* Copy all file from **data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8** to **data/[Your new site address displayed when executed siteCreate]** directory\n* Delete **data** directory and rename **data-default** to **data** to get a clean, empty site\n* Rename **data/users/content-default.json** file to **data/users/content.json**\n* Execute `zeronet.py siteSign [yoursiteaddress] --inner_path data/users/content.json` to sign commenting rules\n* Start ZeroNet\n* Add/Modify content\n* Click on the `Sign & Publish new content` button\n* Congratulations! Your site is ready to access.\n\n_Note: You have to start commands with `..\\python\\python zeronet.py...` if you downloaded ZeroBundle package_"
},
{
"post_id": 2,
"title": "Changelog: Jan 24, 2015",
"date_published": 1422105774.057,
"body": "* Version 0.1.6\n* Only serve .html files with wrapper frame\n* Http parameter support in url\n* Customizable background-color for wrapper in content.json\n* New Websocket API commands (only allowed on own sites):\n - fileWrite: Modify site's files in hdd from javascript\n - sitePublish: Sign new content and Publish to peers\n* Prompt value support in ZeroFrame (used for prompting privatekey for publishing in ZeroBlog)\n\n---\n\n## Previous changes:\n\n### Jan 20, 2014\n- Version 0.1.5\n- Detect computer wakeup from sleep and acts as startup (check open port, site changes)\n- Announce interval changed from 10min to 20min\n- Delete site files command support\n- Stop unfinished downloads on pause, delete\n- Confirm dialog support to WrapperApi\n\nZeroHello\n- Site Delete menuitem\n- Browser back button doesn't jumps to top\n\n### Jan 19, 2014:\n- Version 0.1.4\n- WIF compatible new private addresses\n- Proper bitcoin address verification, vanity address support: http://127.0.0.1:43110/1ZEro9ZwiZeEveFhcnubFLiN3v7tDL4bz\n- No hash error on worker kill\n- Have you secured your private key? confirmation\n\n### Jan 18, 2014:\n- Version 0.1.3\n- content.json hashing changed from sha1 to sha512 (trimmed to 256bits) for better security, keep hasing to sha1 for backward compatiblility yet\n- Fixed fileserver_port argument parsing\n- Try to ping peer before asking any command if no communication for 20min\n- Ping timeout / retry\n- Reduce websocket bw usage\n- Separate wrapper_key for websocket auth and auth_key to identify user\n- Removed unnecessary from wrapper iframe url\n\nZeroHello:\n- Compatiblilty with 0.1.3 websocket changes while maintaining backward compatibility\n- Better error report on file update fail\n\nZeroBoard:\n- Support for sha512 hashed auth_key, but keeping md5 key support for older versions yet\n\n### Jan 17, 2014:\n- Version 0.1.2\n- Better error message logging\n- Kill workers on download done\n- Retry on socket error\n- Timestamping console messages\n\n### Jan 16:\n- Version to 0.1.1\n- Version info to websocket api\n- Add publisher's zeronet version to content.json\n- Still chasing network publish problems, added more debug info\n\nZeroHello:\n- Your and the latest ZeroNet version added to top right corner (please update if you dont see it)\n"
},
{
"post_id": 1,
"title": "ZeroBlog features",
"date_published": 1422105061,
"body": "Initial version (Jan 24, 2014):\n\n* Site avatar generated by site address\n* Distraction-free inline edit: Post title, date, body, Site title, description, links\n* Post format using [markdown](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet)\n* Code block [syntax highlight](#code-highlight-demos) using [highlight.js](https://highlightjs.org/)\n* Create & Delete post\n* Sign & Publish from web\n* Fold blog post: Content after first `---` won't appear at listing\n* Shareable, friendly post urls\n\n\nTodo:\n\n* ~~Better content editor (contenteditable seemed like a good idea, but tricky support of copy/paste makes it more pain than gain)~~\n* Image upload to post & blog avatar\n* Paging\n* Searching\n* ~~Quick cheat-sheet using markdown~~\n\n---\n\n## Code highlight demos\n### Server-side site publishing (UiWebsocket.py):\n```py\ndef actionSitePublish(self, to, params):\n\tsite = self.site\n\tif not site.settings[\"own\"]: return self.response(to, \"Forbidden, you can only modify your own sites\")\n\n\t# Signing\n\tsite.loadContent(True) # Reload content.json, ignore errors to make it up-to-date\n\tsigned = site.signContent(params[0]) # Sign using private key sent by user\n\tif signed:\n\t\tself.cmd(\"notification\", [\"done\", \"Private key correct, site signed!\", 5000]) # Display message for 5 sec\n\telse:\n\t\tself.cmd(\"notification\", [\"error\", \"Site sign failed: invalid private key.\"])\n\t\tself.response(to, \"Site sign failed\")\n\t\treturn\n\tsite.loadContent(True) # Load new content.json, ignore errors\n\n\t# Publishing\n\tif not site.settings[\"serving\"]: # Enable site if paused\n\t\tsite.settings[\"serving\"] = True\n\t\tsite.saveSettings()\n\t\tsite.announce()\n\n\tpublished = site.publish(5) # Publish to 5 peer\n\n\tif published>0: # Successfuly published\n\t\tself.cmd(\"notification\", [\"done\", \"Site published to %s peers.\" % published, 5000])\n\t\tself.response(to, \"ok\")\n\t\tsite.updateWebsocket() # Send updated site data to local websocket clients\n\telse:\n\t\tif len(site.peers) == 0:\n\t\t\tself.cmd(\"notification\", [\"info\", \"No peers found, but your site is ready to access.\"])\n\t\t\tself.response(to, \"No peers found, but your site is ready to access.\")\n\t\telse:\n\t\t\tself.cmd(\"notification\", [\"error\", \"Site publish failed.\"])\n\t\t\tself.response(to, \"Site publish failed.\")\n```\n\n\n### Client-side site publish (ZeroBlog.coffee)\n```coffee\n# Sign and Publish site\npublish: =>\n\tif not @server_info.ip_external # No port open\n\t\t@cmd \"wrapperNotification\", [\"error\", \"To publish the site please open port <b>#{@server_info.fileserver_port}</b> on your router\"]\n\t\treturn false\n\t@cmd \"wrapperPrompt\", [\"Enter your private key:\", \"password\"], (privatekey) => # Prompt the private key\n\t\t$(\".publishbar .button\").addClass(\"loading\")\n\t\t@cmd \"sitePublish\", [privatekey], (res) =>\n\t\t\t$(\".publishbar .button\").removeClass(\"loading\")\n\t\t\t@log \"Publish result:\", res\n\n\treturn false # Ignore link default event\n```\n\n"
}
]
}
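The Jan 18 changelog entry above notes that content.json hashing moved from sha1 to sha512 trimmed to 256 bits. A minimal sketch of what that trimming means in practice (the helper name is hypothetical, not part of ZeroNet's API):

```py
import hashlib

def sha512_trimmed(data: bytes) -> str:
    # A full SHA-512 hexdigest is 128 hex characters (512 bits);
    # keeping the first 64 hex characters yields 256 bits.
    return hashlib.sha512(data).hexdigest()[:64]

print(sha512_trimmed(b"hello"))  # prints a 64-character hex digest
```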

Binary file changes not shown: 15 deleted image files (sizes between 9.3 KiB and 44 KiB).
View File

@@ -1,14 +0,0 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"files": {
"data.json": {
"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
"size": 505
}
},
"inner_path": "data/test_include/content.json",
"modified": 1470340816.513,
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "GxF2ZD0DaMx+CuxafnnRx+IkWTrXubcmTHaJIPyemFpzCvbSo6DyjstN8T3qngFhYIZI/MkcG4ogStG0PLv6p3w="
}
}
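
The content.json above maps each file to its size in bytes and its sha512 digest trimmed to 256 bits (64 hex characters). A minimal sketch of checking one "files" entry against disk, assuming that layout (verify_file_entry is a hypothetical helper, not ZeroNet's real verifier):

```py
import hashlib
import os

def verify_file_entry(site_dir: str, name: str, entry: dict) -> bool:
    # entry comes from content.json's "files" map: {"sha512": ..., "size": ...}
    with open(os.path.join(site_dir, name), "rb") as f:
        data = f.read()
    digest = hashlib.sha512(data).hexdigest()[:64]  # sha512 trimmed to 256 bits
    return len(data) == entry["size"] and digest == entry["sha512"]
```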

View File

@@ -1,37 +0,0 @@
{
"next_topic_id": 1,
"topics": [],
"next_message_id": 5,
"comments": {
"1@2": [
{
"comment_id": 1,
"body": "New user test!",
"added": 1423442049
},
{
"comment_id": 2,
"body": "test 321",
"added": 1423531445
},
{
"comment_id": 3,
"body": "0.2.4 test.",
"added": 1424133003
}
]
},
"topic_votes": {
"1@2": 1,
"1@6": 1,
"1@69": 1,
"607@69": 1
},
"comment_votes": {
"35@2": 1,
"7@64": 1,
"8@64": 1,
"50@2": 1,
"13@77": 1
}
}
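
In the messages file above, topic_votes and comment_votes appear to key each entry as `<item_id>@<suffix>` with a numeric vote value; that reading is inferred from the data itself, not documented here. Under that assumption, a per-item tally could look like:

```py
from collections import defaultdict

def tally_votes(votes: dict) -> dict:
    # votes: {"<item_id>@<suffix>": <vote value>}, e.g. {"1@2": 1, "1@6": 1}
    totals = defaultdict(int)
    for key, value in votes.items():
        item_id = key.split("@")[0]
        totals[item_id] += value
    return dict(totals)

print(tally_votes({"1@2": 1, "1@6": 1, "607@69": 1}))  # {'1': 2, '607': 1}
```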

View File

@@ -1,15 +0,0 @@
{
"cert_auth_type": "web",
"cert_sign": "G4YB7y749GI6mJboyI7cNNfyMwOS0rcVXLmgq8qmCC4TCaRqup3TGWm8hzeru7+B5iXhq19Ruz286bNVKgNbnwU=",
"cert_user_id": "newzeroid@zeroid.bit",
"files": {
"data.json": {
"sha512": "2378ef20379f1db0c3e2a803bfbfda2b68515968b7e311ccc604406168969d34",
"size": 161
}
},
"modified": 1432554679.913,
"signs": {
"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": "GzX/Ht6ms1dOnqB3kVENvDnxpH+mqA0Zlg3hWy0iwgxpyxWcA4zgmwxcEH41BN9RrvCaxgSd2m1SG1/8qbQPzDY="
}
}

Some files were not shown because too many files have changed in this diff.