2015-07-12 20:36:46 +02:00
|
|
|
import time
|
2019-03-16 02:27:04 +01:00
|
|
|
import html
|
2015-07-12 20:36:46 +02:00
|
|
|
import os
|
2018-08-29 19:58:18 +02:00
|
|
|
import json
|
2020-11-03 02:44:31 +01:00
|
|
|
import sys
|
|
|
|
import itertools
|
2015-07-12 20:36:46 +02:00
|
|
|
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
from Plugin import PluginManager
|
2015-04-12 23:59:22 +02:00
|
|
|
from Config import config
|
2019-03-16 02:27:04 +01:00
|
|
|
from util import helper
|
2019-03-23 03:40:42 +01:00
|
|
|
from Debug import Debug
|
2019-04-15 12:31:33 +02:00
|
|
|
from Db import Db
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
|
version 0.2.7, plugin system, multiuser plugin for zeroproxies, reworked imports, cookie parse, stats moved to plugin, usermanager class, dont generate site auth on listing, multiline notifications, allow server side prompt from user, update script keep plugins disabled status
2015-03-24 01:33:09 +01:00
|
|
|
@PluginManager.registerTo("UiRequest")
|
|
|
|
class UiRequestPlugin(object):
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2017-02-27 00:00:27 +01:00
|
|
|
def formatTableRow(self, row, class_name=""):
    """Render one HTML table row.

    Args:
        row: iterable of (cell_format, value) pairs. cell_format is a
            %-style format string, or the special keyword "since" which
            renders the number of seconds elapsed since the value
            (a timestamp). A None value always renders as "n/a".
        class_name: optional CSS class for the <tr> element.

    Returns:
        The row as a single HTML string.
    """
    back = []
    # Renamed from `format` to avoid shadowing the builtin of the same name
    for cell_format, val in row:
        if val is None:
            formatted = "n/a"
        elif cell_format == "since":
            if val:
                # Seconds elapsed since the recorded timestamp
                formatted = "%.0f" % (time.time() - val)
            else:
                # Timestamp of 0/falsy means "never happened"
                formatted = "n/a"
        else:
            formatted = cell_format % val
        back.append("<td>%s</td>" % formatted)
    return "<tr class='%s'>%s</tr>" % (class_name, "".join(back))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
def getObjSize(self, obj, hpy=None):
    """Return the object's size in KB measured via guppy's heapy.

    Without a heapy instance there is nothing to measure with, so 0 is
    reported instead.
    """
    if not hpy:
        return 0
    return float(hpy.iso(obj).domisize) / 1024
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderHead(self):
    """Render the one-line server summary at the top of the /Stats page."""
    import main
    from Crypt import CryptConnection

    # Server / network status
    yield "rev%s | " % config.rev
    yield "%s | " % main.file_server.ip_external_list
    yield "Port: %s | " % main.file_server.port
    yield "Network: %s | " % main.file_server.supported_ip_types
    yield "Opened: %s | " % main.file_server.port_opened
    yield "Crypt: %s, TLSv1.3: %s | " % (CryptConnection.manager.crypt_supported, CryptConnection.ssl.HAS_TLSv1_3)
    yield "In: %.2fMB, Out: %.2fMB | " % (
        float(main.file_server.bytes_recv) / 1024 / 1024,
        float(main.file_server.bytes_sent) / 1024 / 1024
    )
    yield "Peerid: %s | " % main.file_server.peer_id
    yield "Time: %.2fs | " % main.file_server.getTimecorrection()
    yield "Blocks: %s" % Debug.num_block

    # Process statistics (best effort: psutil is an optional dependency)
    try:
        import psutil
        process = psutil.Process(os.getpid())
        # Fix: psutil >= 3.0 removed get_memory_info(); the resulting
        # AttributeError was silently swallowed below, hiding this whole
        # section. memory_info().rss is the supported spelling.
        mem = process.memory_info().rss / float(2 ** 20)
        yield "Mem: %.2fMB | " % mem
        yield "Threads: %s | " % len(process.threads())
        # cpu_times() may return more than (user, system) fields on newer
        # psutil, which would break the 2-slot format string; take two.
        yield "CPU: usr %.2fs sys %.2fs | " % tuple(process.cpu_times()[:2])
        yield "Files: %s | " % len(process.open_files())
        yield "Sockets: %s | " % len(process.connections())
        yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>"
    except Exception:
        # psutil missing or a stat failed: skip the process section
        pass
    yield "<br>"
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderConnectionsTable(self):
    """Render the open-connections table of the /Stats page."""
    import main

    # Connections
    yield "<b>Connections</b> (%s, total made: %s, in: %s, out: %s):<br>" % (
        len(main.file_server.connections), main.file_server.last_connection_id,
        main.file_server.num_incoming, main.file_server.num_outgoing
    )
    yield "<table class='connections'><tr> <th>id</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th>"
    yield "<th>buff</th> <th>bad</th> <th>idle</th> <th>open</th> <th>delay</th> <th>cpu</th> <th>out</th> <th>in</th> <th>last sent</th>"
    yield "<th>wait</th> <th>version</th> <th>time</th> <th>sites</th> </tr>"
    for connection in main.file_server.connections:
        if "cipher" in dir(connection.sock):
            # SSL-wrapped socket: report the negotiated cipher and TLS version
            cipher = connection.sock.cipher()[0]
            tls_version = connection.sock.version()
        else:
            cipher = connection.crypt
            tls_version = ""
        if "time" in connection.handshake and connection.last_ping_delay:
            # Estimated clock offset to the peer, compensated by the ping delay
            time_correction = connection.handshake["time"] - connection.handshake_time - connection.last_ping_delay
        else:
            time_correction = 0.0
        yield self.formatTableRow([
            ("%3d", connection.id),
            ("%s", connection.type),
            ("%s:%s", (connection.ip, connection.port)),
            ("%s", connection.handshake.get("port_opened")),
            ("<span title='%s %s'>%s</span>", (cipher, tls_version, connection.crypt)),
            ("%6.3f", connection.last_ping_delay),
            ("%s", connection.incomplete_buff_recv),
            ("%s", connection.bad_actions),
            # "since" renders elapsed seconds since the given timestamp
            ("since", max(connection.last_send_time, connection.last_recv_time)),
            ("since", connection.start_time),
            ("%.3f", max(-1, connection.last_sent_time - connection.last_send_time)),
            ("%.3f", connection.cpu_time),
            ("%.0fk", connection.bytes_sent / 1024),
            ("%.0fk", connection.bytes_recv / 1024),
            ("<span title='Recv: %s'>%s</span>", (connection.last_cmd_recv, connection.last_cmd_sent)),
            ("%s", list(connection.waiting_requests.keys())),
            ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))),
            ("%.2fs", time_correction),
            ("%s", connection.sites)
        ])
    yield "</table>"
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderTrackers(self):
    """Render tracker statistics, plus shared-tracker statistics when
    the AnnounceShare plugin is loaded."""
    # Trackers
    yield "<br><br><b>Trackers:</b><br>"
    yield "<table class='trackers'><tr> <th>address</th> <th>request</th> <th>successive errors</th> <th>last_request</th></tr>"
    from Site import SiteAnnouncer  # importing at the top of the file breaks plugins
    for tracker_address, tracker_stat in sorted(SiteAnnouncer.global_stats.items()):
        yield self.formatTableRow([
            ("%s", tracker_address),
            ("%s", tracker_stat["num_request"]),
            ("%s", tracker_stat["num_error"]),
            # Cap the displayed age at 999 minutes
            ("%.0f min ago", min(999, (time.time() - tracker_stat["time_request"]) / 60))
        ])
    yield "</table>"

    # Trackers shared by other peers (only with the AnnounceShare plugin)
    if "AnnounceShare" in PluginManager.plugin_manager.plugin_names:
        yield "<br><br><b>Shared trackers:</b><br>"
        yield "<table class='trackers'><tr> <th>address</th> <th>added</th> <th>found</th> <th>latency</th> <th>successive errors</th> <th>last_success</th></tr>"
        from AnnounceShare import AnnounceSharePlugin
        for tracker_address, tracker_stat in sorted(AnnounceSharePlugin.tracker_storage.getTrackers().items()):
            yield self.formatTableRow([
                ("%s", tracker_address),
                ("%.0f min ago", min(999, (time.time() - tracker_stat["time_added"]) / 60)),
                # time_found may be absent for trackers that were never re-discovered
                ("%.0f min ago", min(999, (time.time() - tracker_stat.get("time_found", 0)) / 60)),
                ("%.3fs", tracker_stat["latency"]),
                ("%s", tracker_stat["num_error"]),
                ("%.0f min ago", min(999, (time.time() - tracker_stat["time_success"]) / 60)),
            ])
        yield "</table>"
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderTor(self):
    """Render the list of Tor hidden services (one onion per site)."""
    import main
    tor_manager = main.file_server.tor_manager
    yield "<br><br><b>Tor hidden services (status: %s):</b><br>" % tor_manager.status
    for site_address, onion in list(tor_manager.site_onions.items()):
        yield "- %-34s: %s<br>" % (site_address, onion)
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderDbStats(self):
    """Render one line per opened database: idle time, path, file size
    and per-table row counts."""
    yield "<br><br><b>Db</b>:<br>"
    for opened_db in Db.opened_dbs:
        table_names = [
            row["name"]
            for row in opened_db.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall()
        ]
        row_counts = {
            table_name: opened_db.execute("SELECT COUNT(*) AS c FROM %s" % table_name).fetchone()["c"]
            for table_name in table_names
        }
        size_mb = os.path.getsize(opened_db.db_path) / 1024.0 / 1024.0
        idle_sec = time.time() - opened_db.last_query_time
        yield "- %.3fs: %s %.3fMB, table rows: %s<br>" % (
            idle_sec, opened_db.db_path, size_mb, json.dumps(row_counts, sort_keys=True)
        )
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderSites(self):
    """Render the per-site table with peer and content statistics."""
    yield "<br><br><b>Sites</b>:"
    yield "<table>"
    yield "<tr><th>address</th> <th>connected</th> <th title='connected/good/total'>peers</th> <th>content.json</th> <th>out</th> <th>in</th> </tr>"
    for site in list(self.server.sites.values()):
        yield self.formatTableRow([
            (
                # Clicking the address reveals the hidden per-peer detail row below
                """<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
                (site.address, site.address)
            ),
            ("%s", [peer.connection.id for peer in list(site.peers.values()) if peer.connection and peer.connection.connected]),
            # connected / connectable / total peer counts
            ("%s/%s/%s", (
                len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected]),
                len(site.getConnectablePeers(100)),
                len(site.peers)
            )),
            ("%s (loaded: %s)", (
                len(site.content_manager.contents),
                len([key for key, val in dict(site.content_manager.contents).items() if val])
            )),
            ("%.0fk", site.settings.get("bytes_sent", 0) / 1024),
            ("%.0fk", site.settings.get("bytes_recv", 0) / 1024),
        ], "serving-%s" % site.settings["serving"])
        # Hidden row with detailed peer information, toggled by the address link
        yield "<tr><td id='peers_%s' style='display: none; white-space: pre' colspan=6>" % site.address
        for key, peer in list(site.peers.items()):
            if peer.time_found:
                time_found = int(time.time() - peer.time_found) / 60
            else:
                # NOTE(review): "--" feeds the %.1f placeholder below, which
                # would raise TypeError — presumably time_found is always set
                # for listed peers; confirm.
                time_found = "--"
            if peer.connection:
                connection_id = peer.connection.id
            else:
                connection_id = None
            if site.content_manager.has_optional_files:
                yield "Optional files: %4s " % len(peer.hashfield)
            # Days since the peer was first added
            time_added = (time.time() - peer.time_added) / (60 * 60 * 24)
            yield "(#%4s, rep: %2s, err: %s, found: %.1fs min, add: %.1f day) %30s -<br>" % (connection_id, peer.reputation, peer.connection_error, time_found, time_added, key)
        yield "<br></td></tr>"
    yield "</table>"
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderBigfiles(self):
    """Render, per site with big files, a collapsible table of which
    peers hold which pieces of each big file."""
    yield "<br><br><b>Big files</b>:<br>"
    for site in list(self.server.sites.values()):
        if not site.settings.get("has_bigfile"):
            continue
        peers_by_hash = {}
        yield """<a href="#" onclick='document.getElementById("bigfiles_%s").style.display="initial"; return false'>%s</a><br>""" % (site.address, site.address)
        for site_peer in list(site.peers.values()):
            # Only peers that have reported their piecefields are relevant
            if not site_peer.time_piecefields_updated:
                continue
            for file_hash, piecefield in site_peer.piecefields.items():
                peers_by_hash.setdefault(file_hash, []).append(site_peer)

        yield "<div id='bigfiles_%s' style='display: none'>" % site.address
        for file_hash, hash_peers in peers_by_hash.items():
            yield "<br> - " + file_hash + " (hash id: %s)<br>" % site.content_manager.hashfield.getHashId(file_hash)
            yield "<table>"
            for hash_peer in hash_peers:
                yield "<tr><td>" + hash_peer.key + "</td><td>" + hash_peer.piecefields[file_hash].tostring() + "</td></tr>"
            yield "</table>"
        yield "</div>"
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderRequests(self):
    """Render the sent and received command statistics side by side."""
    import main

    def renderStatTable(div_style, title, stats):
        # Shared renderer: command name, call count and transferred bytes,
        # ordered by bytes descending.
        yield "<div style='%s'>" % div_style
        yield "<br><br><b>%s</b>:<br>" % title
        yield "<table>"
        for stat_key, stat in sorted(stats.items(), key=lambda i: i[1]["bytes"], reverse=True):
            yield "<tr><td>%s</td><td style='white-space: nowrap'>x %s =</td><td>%.0fkB</td></tr>" % (stat_key, stat["num"], stat["bytes"] / 1024)
        yield "</table>"
        yield "</div>"

    yield from renderStatTable("float: left", "Sent commands", main.file_server.stat_sent)
    yield from renderStatTable("float: left; margin-left: 20%; max-width: 50%", "Received commands", main.file_server.stat_recv)
    yield "<div style='clear: both'></div>"
|
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
def renderMemory(self):
    """Render in-memory object statistics (debug mode only).

    Counts every object tracked by the garbage collector, then lists
    instances of the interesting ZeroNet classes. Per-object sizes are
    measured only when ?size=1 is passed and guppy is installed;
    otherwise getObjSize reports 0.
    """
    import gc
    from Ui import UiRequest

    hpy = None
    if self.get.get("size") == "1":  # Calc obj size
        try:
            import guppy
            hpy = guppy.hpy()
        except Exception:
            # guppy is optional; without it all sizes show as 0
            pass
    self.sendHeader()

    # Object types
    obj_count = {}
    for obj in gc.get_objects():
        obj_type = str(type(obj))
        if obj_type not in obj_count:
            obj_count[obj_type] = [0, 0]
        obj_count[obj_type][0] += 1  # Count
        obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024  # Size

    yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
        len(obj_count),
        sum([stat[0] for stat in list(obj_count.values())]),
        sum([stat[1] for stat in list(obj_count.values())])
    )

    for obj, stat in sorted(list(obj_count.items()), key=lambda x: x[1][0], reverse=True):  # Sorted by count
        yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (stat[1], stat[0], obj, html.escape(obj))

    # Classes
    # NOTE(review): "<type 'instance'>" is the Python 2 repr of old-style
    # class instances; on Python 3 str(type(obj)) is "<class '...'>", so
    # this filter never matches and the section always reports zero
    # classes — confirm whether it should be updated or removed.
    class_count = {}
    for obj in gc.get_objects():
        obj_type = str(type(obj))
        if obj_type != "<type 'instance'>":
            continue
        class_name = obj.__class__.__name__
        if class_name not in class_count:
            class_count[class_name] = [0, 0]
        class_count[class_name][0] += 1  # Count
        class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024  # Size

    yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
        len(class_count),
        sum([stat[0] for stat in list(class_count.values())]),
        sum([stat[1] for stat in list(class_count.values())])
    )

    for obj, stat in sorted(list(class_count.items()), key=lambda x: x[1][0], reverse=True):  # Sorted by count
        yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (stat[1], stat[0], obj, html.escape(obj))

    # Per-class instance listings below; imports are local so the plugin
    # loads even if a module is unavailable at file import time.
    from greenlet import greenlet
    objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
    yield "<br>Greenlets (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    from Worker import Worker
    objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
    yield "<br>Workers (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    from Connection import Connection
    objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
    yield "<br>Connections (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    from socket import socket
    objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)]
    yield "<br>Sockets (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    from msgpack import Unpacker
    objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)]
    yield "<br>Msgpack unpacker (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    from Site.Site import Site
    objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
    yield "<br>Sites (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)]
    yield "<br>Loggers (%s):<br>" % len(objs)
    for obj in objs:
        # Loggers are listed by name rather than repr
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj.name)))

    objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
    yield "<br>UiRequests (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    from Peer import Peer
    objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
    yield "<br>Peers (%s):<br>" % len(objs)
    for obj in objs:
        yield " - %.1fkb: %s<br>" % (self.getObjSize(obj, hpy), html.escape(repr(obj)))

    # Loaded modules, alphabetically
    objs = [(key, val) for key, val in sys.modules.items() if val is not None]
    objs.sort()
    yield "<br>Modules (%s):<br>" % len(objs)
    for module_name, module in objs:
        yield " - %.3fkb: %s %s<br>" % (self.getObjSize(module, hpy), module_name, html.escape(repr(module)))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2020-11-03 02:44:31 +01:00
|
|
|
# /Stats entry point
@helper.encodeResponse
def actionStats(self):
    """Stream the full /Stats page by chaining all render* generators."""
    import gc

    self.sendHeader()

    # On multiuser proxies stats would leak information about other users
    if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
        yield "This function is disabled on this proxy"
        return

    s = time.time()

    # Style
    yield """
     <style>
      * { font-family: monospace }
      table td, table th { text-align: right; padding: 0px 10px }
      .connections td { white-space: nowrap }
      .serving-False { opacity: 0.3 }
     </style>
    """

    renderers = [
        self.renderHead(),
        self.renderConnectionsTable(),
        self.renderTrackers(),
        self.renderTor(),
        self.renderDbStats(),
        self.renderSites(),
        self.renderBigfiles(),
        self.renderRequests()

    ]

    for part in itertools.chain(*renderers):
        yield part

    # Memory statistics are expensive and debug-only
    if config.debug:
        for part in self.renderMemory():
            yield part

    gc.collect()  # Implicit garbage collection
    yield "Done in %.1f" % (time.time() - s)
|
|
|
|
|
2019-07-15 09:50:24 +02:00
|
|
|
@helper.encodeResponse
def actionDumpobj(self):
    """/Dumpobj?class=<name>: dump every attribute of all live objects
    whose class name matches (debug mode + local access only)."""
    import gc
    import sys

    self.sendHeader()

    if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
        yield "This function is disabled on this proxy"
        return

    # No more if not in debug mode
    if not config.debug:
        yield "Not in debug mode"
        return

    class_filter = self.get.get("class")

    yield """
     <style>
      * { font-family: monospace; white-space: pre }
      table * { text-align: right; padding: 0px 10px }
     </style>
    """

    objs = gc.get_objects()
    for obj in objs:
        # Fix: the old check compared str(type(obj)) against the Python 2
        # literal "<type 'instance'>", which never matches on Python 3, so
        # this endpoint dumped nothing. Match on the class name directly —
        # renderMemory links here with class=obj.__class__.__name__.
        if obj.__class__.__name__ != class_filter:
            continue
        yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, html.escape(str(obj)))
        for attr in dir(obj):
            yield "- %s: %s<br>" % (attr, html.escape(str(getattr(obj, attr))))
        yield "<br>"

    gc.collect()  # Implicit garbage collection
|
|
|
|
|
2019-07-15 09:50:24 +02:00
|
|
|
@helper.encodeResponse
def actionListobj(self):
    """/Listobj?type=<type repr>: list objects of the given type together
    with the objects referring to them (debug mode + local access only)."""
    import gc
    import sys

    self.sendHeader()

    if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
        yield "This function is disabled on this proxy"
        return

    # No more if not in debug mode
    if not config.debug:
        yield "Not in debug mode"
        return

    # NOTE(review): type_filter is None when ?type= is missing; html.escape
    # below would raise — presumably the links generated by renderMemory
    # always include it. Confirm.
    type_filter = self.get.get("type")

    yield """
     <style>
      * { font-family: monospace; white-space: pre }
      table * { text-align: right; padding: 0px 10px }
     </style>
    """

    yield "Listing all %s objects in memory...<br>" % html.escape(type_filter)

    ref_count = {}
    objs = gc.get_objects()
    for obj in objs:
        obj_type = str(type(obj))
        if obj_type != type_filter:
            continue
        # Skip referrers that are plain containers or interpreter bookkeeping
        refs = [
            ref for ref in gc.get_referrers(obj)
            if hasattr(ref, "__class__") and
            ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"]
        ]
        if not refs:
            continue
        try:
            yield "%.1fkb <span title=\"%s\">%s</span>... " % (
                float(sys.getsizeof(obj)) / 1024, html.escape(str(obj)), html.escape(str(obj)[0:100].ljust(100))
            )
        except Exception:
            # str()/repr() of arbitrary objects can fail; skip those
            continue
        for ref in refs:
            yield " ["
            if "object at" in str(ref) or len(str(ref)) > 100:
                # Too noisy to print: show only the class name
                yield str(ref.__class__.__name__)
            else:
                yield str(ref.__class__.__name__) + ":" + html.escape(str(ref))
            yield "] "
            ref_type = ref.__class__.__name__
            if ref_type not in ref_count:
                ref_count[ref_type] = [0, 0]
            ref_count[ref_type][0] += 1  # Count
            ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024  # Size
        yield "<br>"

    yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (len(ref_count), sum([stat[1] for stat in list(ref_count.values())]))

    # Top 30 referrer types by count
    for obj, stat in sorted(list(ref_count.items()), key=lambda x: x[1][0], reverse=True)[0:30]:  # Sorted by count
        yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0], html.escape(str(obj)))

    gc.collect()  # Implicit garbage collection
|
|
|
|
|
2019-07-15 09:50:24 +02:00
|
|
|
@helper.encodeResponse
def actionGcCollect(self):
    # Debug endpoint: force a full garbage-collection pass and stream back
    # the number of unreachable objects found, rendered as a plain string.
    import gc

    self.sendHeader()
    num_unreachable = gc.collect()
    yield str(num_unreachable)
|
2020-11-03 02:48:01 +01:00
|
|
|
|
|
|
|
# /About entry point
@helper.encodeResponse
def actionEnv(self):
    # Render the environment/debug page: a small inline stylesheet followed
    # by the output of Actions.testEnv (version/library report) as HTML.
    import main

    self.sendHeader()

    yield """
     <style>
      * { font-family: monospace; white-space: pre; }
      h2 { font-size: 100%; margin-bottom: 0px; }
      small { opacity: 0.5; }
      table { border-collapse: collapse; }
      td { padding-right: 10px; }
     </style>
    """

    # On multiuser proxies this page would leak server environment details,
    # so it is only allowed when running in local (single-user) mode.
    if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
        yield "This function is disabled on this proxy"
        return

    # Delegate the actual report generation to the Actions plugin below.
    yield from main.actions.testEnv(format="html")
|
|
|
|
|
|
|
|
|
|
|
|
@PluginManager.registerTo("Actions")
class ActionsPlugin:
    """Mixin registered into the Actions class: environment/version report
    (testEnv) plus small helpers to format it as plain text or HTML."""
|
|
|
|
def formatTable(self, *rows, format="text"):
|
|
|
|
if format == "html":
|
|
|
|
return self.formatTableHtml(*rows)
|
|
|
|
else:
|
|
|
|
return self.formatTableText(*rows)
|
|
|
|
|
|
|
|
def formatHead(self, title, format="text"):
    """Return a section heading for *title* in the requested format."""
    if format != "html":
        return "\n* %s\n" % title
    return "<h2>%s</h2>" % title
|
|
|
|
|
|
|
|
def formatTableHtml(self, *rows):
    """Yield the pieces of an HTML table built from *rows*.

    Every cell value is stringified and HTML-escaped before output.
    """
    yield "<table>"
    for cells in rows:
        yield "<tr>"
        for cell in cells:
            yield "<td>" + html.escape(str(cell)) + "</td>"
        yield "</tr>"
    yield "</table>"
|
|
|
|
|
|
|
|
def formatTableText(self, *rows):
    """Yield *rows* as plain text: a leading space, then each column
    prefixed by one space, with one row per line."""
    for cells in rows:
        yield " "
        for cell in cells:
            yield " %s" % cell
        yield "\n"
|
|
|
|
|
|
|
|
def testEnv(self, format="text"):
    """Yield an environment report (ZeroNet/Python/crypto/library versions).

    format: "text" (default) or "html"; passed through to the table and
    heading formatters.
    """
    import gevent
    import msgpack
    import pkg_resources
    import importlib
    import coincurve
    import sqlite3
    from Crypt import CryptBitcoin

    yield "\n"

    yield from self.formatTable(
        ["ZeroNet version:", "%s rev%s" % (config.version, config.rev)],
        ["Python:", "%s" % sys.version],
        ["Platform:", "%s" % sys.platform],
        ["Crypt verify lib:", "%s" % CryptBitcoin.lib_verify_best],
        ["OpenSSL:", "%s" % CryptBitcoin.sslcrypto.ecc.get_backend()],
        ["Libsecp256k1:", "%s" % type(coincurve._libsecp256k1.lib).__name__],
        ["SQLite:", "%s, API: %s" % (sqlite3.sqlite_version, sqlite3.version)],
        format=format
    )

    # Fix: pass format through so the heading is rendered as <h2> on the
    # HTML page, consistent with the "Library config:" heading below.
    yield self.formatHead("Libraries:", format=format)
    rows = []
    for lib_name in ["gevent", "greenlet", "msgpack", "base58", "merkletools", "rsa", "socks", "pyasn1", "gevent_ws", "websocket", "maxminddb"]:
        try:
            module = importlib.import_module(lib_name)
            # Libraries expose their version under different attribute names.
            if "__version__" in dir(module):
                version = module.__version__
            elif "version" in dir(module):
                version = module.version
            else:
                version = "unknown version"

            # Some libraries report a tuple such as (1, 2, 3).
            if type(version) is tuple:
                version = ".".join(map(str, version))

            rows.append(["- %s:" % lib_name, version, "at " + module.__file__])
        except Exception as err:
            # Fix: interpolate lib_name; the original left the bare
            # "%s" placeholder in the output.
            rows.append(["! Error importing %s:" % lib_name, repr(err)])

    yield from self.formatTable(*rows, format=format)

    yield self.formatHead("Library config:", format=format)

    yield from self.formatTable(
        ["- gevent:", gevent.config.loop.__module__],
        ["- msgpack unpacker:", msgpack.Unpacker.__module__],
        format=format
    )
|