From e8af5db2e8ea0c565b618b7bc9ed5ef8d8dcecc4 Mon Sep 17 00:00:00 2001
From: shortcutme
Date: Tue, 19 Nov 2019 01:43:39 +0100
Subject: [PATCH] Keep track gevent block number, remove Benchmark from stats plugin

---
 plugins/Stats/StatsPlugin.py | 360 +----------------------------------
 src/Debug/Debug.py           |   3 +
 2 files changed, 6 insertions(+), 357 deletions(-)

diff --git a/plugins/Stats/StatsPlugin.py b/plugins/Stats/StatsPlugin.py
index 6e4bce16..ca7c8281 100644
--- a/plugins/Stats/StatsPlugin.py
+++ b/plugins/Stats/StatsPlugin.py
@@ -74,7 +74,7 @@ class UiRequestPlugin(object):
         yield "rev%s | " % config.rev
         yield "%s | " % main.file_server.ip_external_list
         yield "Port: %s | " % main.file_server.port
-        yield "IP Network: %s | " % main.file_server.supported_ip_types
+        yield "Network: %s | " % main.file_server.supported_ip_types
         yield "Opened: %s | " % main.file_server.port_opened
         yield "Crypt: %s, TLSv1.3: %s | " % (CryptConnection.manager.crypt_supported, CryptConnection.ssl.HAS_TLSv1_3)
         yield "In: %.2fMB, Out: %.2fMB | " % (
@@ -82,7 +82,8 @@ class UiRequestPlugin(object):
             float(main.file_server.bytes_sent) / 1024 / 1024
         )
         yield "Peerid: %s | " % main.file_server.peer_id
-        yield "Time correction: %.2fs" % main.file_server.getTimecorrection()
+        yield "Time: %.2fs | " % main.file_server.getTimecorrection()
+        yield "Blocks: %s" % Debug.num_block
 
         try:
             import psutil
@@ -479,361 +480,6 @@ class UiRequestPlugin(object):
 
         gc.collect()  # Implicit grabage collection
 
-    @helper.encodeResponse
-    def actionBenchmark(self):
-        import sys
-        import gc
-        from contextlib import contextmanager
-
-        output = self.sendHeader()
-
-        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
-            yield "This function is disabled on this proxy"
-            return
-
-        @contextmanager
-        def benchmark(name, standard):
-            self.log.debug("Benchmark: %s" % name)
-            s = time.time()
-            output(b"- %s" % name.encode())
-            try:
-                yield 1
-            except Exception as err:
-                self.log.exception(err)
-                output(b"<br>! Error: %s<br>" % Debug.formatException(err).encode())
-            taken = time.time() - s
-            if taken > 0:
-                multipler = standard / taken
-            else:
-                multipler = 99
-            if multipler < 0.3:
-                speed = "Sloooow"
-            elif multipler < 0.5:
-                speed = "Ehh"
-            elif multipler < 0.8:
-                speed = "Goodish"
-            elif multipler < 1.2:
-                speed = "OK"
-            elif multipler < 1.7:
-                speed = "Fine"
-            elif multipler < 2.5:
-                speed = "Fast"
-            elif multipler < 3.5:
-                speed = "WOW"
-            else:
-                speed = "Insane!!"
-            output(b"%.3fs [x%.2f: %s]<br>" % (taken, multipler, speed.encode()))
-            time.sleep(0.01)
-
-        yield """
-
-        """
-
-        yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)
-
-        t = time.time()
-
-        # CryptBitcoin
-        yield "<br>CryptBitcoin:<br>"
-        from Crypt import CryptBitcoin
-
-        # seed = CryptBitcoin.newSeed()
-        # yield "- Seed: %s<br>" % seed
-        seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"
-
-        with benchmark("hdPrivatekey x 10", 0.7):
-            for i in range(10):
-                privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
-                yield "."
-            valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
-            assert privatekey == valid, "%s != %s" % (privatekey, valid)
-
-        data = "Hello" * 1024  # 5k
-        with benchmark("sign x 10", 0.35):
-            for i in range(10):
-                yield "."
-                sign = CryptBitcoin.sign(data, privatekey)
-            valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
-            assert sign == valid, "%s != %s" % (sign, valid)
-
-        address = CryptBitcoin.privatekeyToAddress(privatekey)
-        for lib_verify in ["btctools", "openssl", "libsecp256k1"]:
-            try:
-                CryptBitcoin.loadLib(lib_verify)
-                loaded = True
-                if lib_verify == "openssl":
-                    yield "+ Loaded lib: %s<br>" % html.escape(str(CryptBitcoin.bitcoin.core.key._ssl))
-                elif lib_verify == "libsecp256k1":
-                    import coincurve
-                    yield "+ Loaded lib: %s<br>" % type(coincurve._libsecp256k1.lib).__name__
-            except Exception as err:
-                yield "- Error loading %s: %s<br>" % (lib_verify, err)
-                loaded = False
-            if not loaded:
-                continue
-            with benchmark("%s verify x 100" % lib_verify, 0.37):
-                for i in range(100):
-                    if i % 10 == 0:
-                        yield "."
-                    ok = CryptBitcoin.verify(data, address, sign, lib_verify=lib_verify)
-                    assert ok, "does not verify from %s" % address
-
-        # CryptHash
-        yield "<br>CryptHash:<br>"
-        from Crypt import CryptHash
-        import io
-
-        data = io.BytesIO(b"Hello" * 1024 * 1024)  # 5m
-        with benchmark("sha256 5M x 10", 0.6):
-            for i in range(10):
-                data.seek(0)
-                hash = CryptHash.sha256sum(data)
-                yield "."
-            valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"
-            assert hash == valid, "%s != %s" % (hash, valid)
-
-        data = io.BytesIO(b"Hello" * 1024 * 1024)  # 5m
-        with benchmark("sha512 5M x 10", 0.6):
-            for i in range(10):
-                data.seek(0)
-                hash = CryptHash.sha512sum(data)
-                yield "."
-            valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"
-            assert hash == valid, "%s != %s" % (hash, valid)
-
-        with benchmark("os.urandom(256) x 1000", 0.0065):
-            for i in range(10):
-                for y in range(100):
-                    data = os.urandom(256)
-                yield "."
-
-        # Msgpack
-        from util import Msgpack
-        yield "<br>Msgpack: (version: %s)<br>
" % ".".join(map(str, Msgpack.msgpack.version)) - binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv' - data = OrderedDict( - sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items()) - ) - data_packed_valid = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 
\xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohe
llohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello' - with benchmark("pack 5K x 10 000", 0.78): - for i in range(10): - for y in range(1000): - data_packed = Msgpack.pack(data) - yield "." - assert data_packed == data_packed_valid, "%s
!=<br>%s" % (repr(data_packed), repr(data_packed_valid))
-
-        with benchmark("unpack 5K x 10 000", 1.2):
-            for i in range(10):
-                for y in range(1000):
-                    data_unpacked = Msgpack.unpack(data_packed, decode=False)
-                yield "."
-            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)
-
-        for fallback in [True, False]:
-            with benchmark("streaming unpack 5K x 10 000 (fallback: %s)" % fallback, 1.4):
-                for i in range(10):
-                    unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
-                    for y in range(1000):
-                        unpacker.feed(data_packed)
-                        for data_unpacked in unpacker:
-                            pass
-                    yield "."
-                assert data == data_unpacked, "%s != %s" % (data_unpacked, data)
-
-        # Db
-        import sqlite3
-        yield "<br>Db: (version: %s, API: %s)<br>" % (sqlite3.sqlite_version, sqlite3.version)
-
-        schema = {
-            "db_name": "TestDb",
-            "db_file": "%s/benchmark.db" % config.data_dir,
-            "maps": {
-                ".*": {
-                    "to_table": {
-                        "test": "test"
-                    }
-                }
-            },
-            "tables": {
-                "test": {
-                    "cols": [
-                        ["test_id", "INTEGER"],
-                        ["title", "TEXT"],
-                        ["json_id", "INTEGER REFERENCES json (json_id)"]
-                    ],
-                    "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
-                    "schema_changed": 1426195822
-                }
-            }
-        }
-
-        if os.path.isfile("%s/benchmark.db" % config.data_dir):
-            os.unlink("%s/benchmark.db" % config.data_dir)
-
-        with benchmark("Open x 10", 0.13):
-            for i in range(10):
-                db = Db.Db(schema, "%s/benchmark.db" % config.data_dir)
-                db.checkTables()
-                db.close()
-                yield "."
-
-        db = Db.Db(schema, "%s/benchmark.db" % config.data_dir)
-        db.checkTables()
-        import json
-
-        with benchmark("Insert x 10 x 1000", 1.0):
-            for u in range(10):  # 10 user
-                data = {"test": []}
-                for i in range(1000):  # 1000 line of data
-                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
-                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
-                db.updateJson("%s/test_%s.json" % (config.data_dir, u))
-                os.unlink("%s/test_%s.json" % (config.data_dir, u))
-                yield "."
-
-        with benchmark("Buffered insert x 100 x 100", 1.3):
-            cur = db.getCursor()
-            cur.logging = False
-            for u in range(100, 200):  # 100 user
-                data = {"test": []}
-                for i in range(100):  # 1000 line of data
-                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
-                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
-                db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
-                os.unlink("%s/test_%s.json" % (config.data_dir, u))
-                if u % 10 == 0:
-                    yield "."
-
-        yield " + Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]
-
-        with benchmark("Indexed query x 1000", 0.25):
-            found = 0
-            cur = db.getCursor()
-            cur.logging = False
-            for i in range(1000):  # 1000x by test_id
-                res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
-                for row in res:
-                    found += 1
-                if i % 100 == 0:
-                    yield "."
-
-            assert found == 20000, "Found: %s != 20000" % found
-
-        with benchmark("Not indexed query x 100", 0.6):
-            found = 0
-            cur = db.getCursor()
-            cur.logging = False
-            for i in range(100):  # 1000x by test_id
-                res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
-                for row in res:
-                    found += 1
-                if i % 10 == 0:
-                    yield "."
-
-            assert found == 18900, "Found: %s != 18900" % found
-
-        with benchmark("Like query x 100", 1.8):
-            found = 0
-            cur = db.getCursor()
-            cur.logging = False
-            for i in range(100):  # 1000x by test_id
-                res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
-                for row in res:
-                    found += 1
-                if i % 10 == 0:
-                    yield "."
-
-            assert found == 38900, "Found: %s != 11000" % found
-
-        db.close()
-        if os.path.isfile("%s/benchmark.db" % config.data_dir):
-            os.unlink("%s/benchmark.db" % config.data_dir)
-
-        gc.collect()  # Implicit grabage collection
-
-        # Zip
-        yield "<br>Compression:<br>"
-        import zipfile
-        test_data = b"Test" * 1024
-        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb0r\xc5\x91t\xc3\xbck\xc3\xb6r\xc3\xb3g\xc3\xa9p\xe4\xb8\xad\xe5\x8d\x8e%s.txt".decode("utf8")
-
-        with benchmark("Zip pack x 10", 0.12):
-            for i in range(10):
-                with zipfile.ZipFile('%s/test.zip' % config.data_dir, 'w') as archive:
-                    for y in range(100):
-                        zip_info = zipfile.ZipInfo(file_name % y, (1980,1,1,0,0,0))
-                        zip_info.compress_type = zipfile.ZIP_DEFLATED
-                        zip_info.create_system = 3
-                        zip_info.flag_bits = 0
-                        zip_info.external_attr = 25165824
-                        archive.writestr(zip_info, test_data)
-                yield "."
-
-        hash = CryptHash.sha512sum(open("%s/test.zip" % config.data_dir, "rb"))
-        valid = "f630fece29fff1cc8dbf454e47a87fea2746a4dbbd2ceec098afebab45301562"
-        assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)
-
-        with benchmark("Zip unpack x 10", 0.2):
-            for i in range(10):
-                with zipfile.ZipFile('%s/test.zip' % config.data_dir) as archive:
-                    for y in range(100):
-                        data = archive.open(file_name % y).read()
-                        assert archive.open(file_name % y).read() == test_data, "Invalid data: %s..." % data[0:30]
-                yield "."
-
-        if os.path.isfile("%s/test.zip" % config.data_dir):
-            os.unlink("%s/test.zip" % config.data_dir)
-
-        # gz, bz2, xz
-        import tarfile
-        import gzip
-
-        # Monkey patch _init_write_gz to use fixed date in order to keep the hash independent from datetime
-        def nodate_write_gzip_header(self):
-            self._write_mtime = 0
-            original_write_gzip_header(self)
-
-        original_write_gzip_header = gzip.GzipFile._write_gzip_header
-        gzip.GzipFile._write_gzip_header = nodate_write_gzip_header
-
-        test_data_io = io.BytesIO(b"Test" * 1024)
-        archive_formats = {
-            "gz": {"hash": "4704ebd8c987ed6f833059f1de9c475d443b0539b8d4c4cb8b49b26f7bbf2d19", "time_pack": 0.3, "time_unpack": 0.2},
-            "bz2": {"hash": "90cba0b4d9abaa37b830bf37e4adba93bfd183e095b489ebee62aaa94339f3b5", "time_pack": 2.0, "time_unpack": 0.5},
-            "xz": {"hash": "37abc16d552cfd4a495cb2acbf8b1d5877631d084f6571f4d6544bc548c69bae", "time_pack": 1.4, "time_unpack": 0.2}
-        }
-        for ext, format_data in archive_formats.items():
-            archive_path = '%s/test.tar.%s' % (config.data_dir, ext)
-            with benchmark("Tar.%s pack x 10" % ext, format_data["time_pack"]):
-                for i in range(10):
-                    with tarfile.open(archive_path, 'w:%s' % ext) as archive:
-                        for y in range(100):
-                            test_data_io.seek(0)
-                            tar_info = tarfile.TarInfo(file_name % y)
-                            tar_info.size = 4 * 1024
-                            archive.addfile(tar_info, test_data_io)
-                    yield "."
-
-            hash = CryptHash.sha512sum(open("%s/test.tar.%s" % (config.data_dir, ext), "rb"))
-            valid = format_data["hash"]
-            assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)
-
-            archive_size = os.path.getsize(archive_path) / 1024
-            with benchmark("Tar.%s unpack (%.2fkB) x 10" % (ext, archive_size), format_data["time_unpack"]):
-                for i in range(10):
-                    with tarfile.open(archive_path, 'r:%s' % ext) as archive:
-                        for y in range(100):
-                            assert archive.extractfile(file_name % y).read() == test_data
-                    yield "."
-
-            if os.path.isfile(archive_path):
-                os.unlink(archive_path)
-
-        yield "<br>Done. Total: %.2fs" % (time.time() - t)
-
     @helper.encodeResponse
     def actionGcCollect(self):
         import gc
diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py
index 0a70354d..18fb2e29 100644
--- a/src/Debug/Debug.py
+++ b/src/Debug/Debug.py
@@ -70,13 +70,16 @@
 import gevent
 import time
 
+num_block = 0
 
 def testBlock():
+    global num_block
     logging.debug("Gevent block checker started")
     last_time = time.time()
     while 1:
         time.sleep(1)
         if time.time() - last_time > 1.1:
             logging.debug("Gevent block detected: %s" % (time.time() - last_time - 1))
+            num_block += 1
         last_time = time.time()
 gevent.spawn(testBlock)