From b0b9a4d33c9d7ad285ad8bd3ecbcb26be8cc7871 Mon Sep 17 00:00:00 2001 From: shortcutme Date: Fri, 15 Mar 2019 21:06:59 +0100 Subject: [PATCH] Change to Python3 coding style --- plugins/AnnounceLocal/AnnounceLocalPlugin.py | 6 +- plugins/AnnounceLocal/Test/TestAnnounce.py | 8 +- plugins/AnnounceLocal/__init__.py | 2 +- plugins/AnnounceShare/AnnounceSharePlugin.py | 8 +- .../AnnounceShare/Test/TestAnnounceShare.py | 1 - plugins/AnnounceShare/__init__.py | 2 +- plugins/AnnounceZero/AnnounceZeroPlugin.py | 2 +- plugins/AnnounceZero/__init__.py | 2 +- plugins/Bigfile/BigfilePiecefield.py | 30 ++--- plugins/Bigfile/BigfilePlugin.py | 35 +++--- plugins/Bigfile/Test/TestBigfile.py | 56 ++++----- plugins/Bigfile/__init__.py | 4 +- plugins/Chart/ChartCollector.py | 34 +++--- plugins/Chart/ChartPlugin.py | 6 +- plugins/Chart/__init__.py | 2 +- plugins/ContentFilter/ContentFilterPlugin.py | 18 +-- plugins/ContentFilter/ContentFilterStorage.py | 2 +- plugins/ContentFilter/__init__.py | 2 +- plugins/Cors/CorsPlugin.py | 6 +- plugins/Cors/__init__.py | 2 +- plugins/CryptMessage/CryptMessage.py | 4 +- plugins/CryptMessage/CryptMessagePlugin.py | 4 +- plugins/CryptMessage/__init__.py | 2 +- plugins/FilePack/FilePackPlugin.py | 2 +- plugins/FilePack/__init__.py | 2 +- plugins/MergerSite/MergerSitePlugin.py | 22 ++-- plugins/MergerSite/__init__.py | 2 +- plugins/Newsfeed/NewsfeedPlugin.py | 12 +- plugins/Newsfeed/__init__.py | 2 +- plugins/OptionalManager/ContentDbPlugin.py | 16 +-- .../OptionalManager/OptionalManagerPlugin.py | 10 +- .../Test/TestOptionalManager.py | 15 +-- plugins/OptionalManager/UiWebsocketPlugin.py | 16 +-- plugins/OptionalManager/__init__.py | 2 +- plugins/PeerDb/PeerDbPlugin.py | 4 +- plugins/PeerDb/__init__.py | 2 +- plugins/Sidebar/SidebarPlugin.py | 106 +++++++++--------- plugins/Sidebar/ZipStream.py | 8 +- plugins/Sidebar/__init__.py | 2 +- plugins/Stats/__init__.py | 2 +- plugins/TranslateSite/TranslateSitePlugin.py | 16 +-- plugins/TranslateSite/__init__.py | 2 +- plugins/Trayicon/TrayiconPlugin.py | 6 +- plugins/Trayicon/__init__.py | 2 +- plugins/Trayicon/languages/es.json | 2 +- plugins/Trayicon/lib/notificationicon.py | 68 +++++------ plugins/Trayicon/lib/winfolders.py | 7 +- plugins/UiConfig/UiConfigPlugin.py | 7 +- plugins/UiConfig/__init__.py | 2 +- .../BootstrapperPlugin.py | 6 +- .../Test/TestBootstrapper.py | 2 +- plugins/disabled-Bootstrapper/__init__.py | 2 +- .../disabled-Dnschain/SiteManagerPlugin.py | 4 +- plugins/disabled-DonationMessage/__init__.py | 2 +- plugins/disabled-Multiuser/MultiuserPlugin.py | 6 +- plugins/disabled-Multiuser/__init__.py | 2 +- plugins/disabled-StemPort/StemPortPlugin.py | 22 ++-- plugins/disabled-StemPort/__init__.py | 20 ++-- plugins/disabled-UiPassword/__init__.py | 2 +- .../SiteManagerPlugin.py | 2 +- plugins/disabled-Zeroname-local/__init__.py | 4 +- .../bitcoinrpc/authproxy.py | 10 +- .../disabled-Zeroname-local/domainLookup.py | 2 +- src/Config.py | 10 +- src/Connection/Connection.py | 14 +-- src/Connection/ConnectionServer.py | 10 +- src/Connection/__init__.py | 4 +- src/Content/ContentDb.py | 6 +- src/Content/ContentDbDict.py | 20 ++-- src/Content/ContentManager.py | 67 +++++------ src/Content/__init__.py | 2 +- src/Crypt/CryptHash.py | 8 +- src/Crypt/CryptRsa.py | 2 +- src/Db/Db.py | 25 ++--- src/Db/DbQuery.py | 6 +- src/Db/__init__.py | 6 +- src/Debug/Debug.py | 18 +-- src/Debug/DebugHook.py | 19 ++-- src/Debug/DebugMedia.py | 29 ++--- src/Debug/__init__.py | 1 - src/File/FileRequest.py | 16 +-- src/File/FileServer.py | 18 +-- 
src/File/__init__.py | 4 +- src/Peer/Peer.py | 19 ++-- src/Peer/PeerHashfield.py | 4 +- src/Peer/PeerPortchecker.py | 17 ++- src/Peer/__init__.py | 4 +- src/Plugin/PluginManager.py | 23 ++-- src/Site/Site.py | 48 ++++---- src/Site/SiteAnnouncer.py | 21 ++-- src/Site/SiteManager.py | 16 +-- src/Site/SiteStorage.py | 44 ++++---- src/Site/__init__.py | 6 +- src/Test/BenchmarkSsl.py | 14 +-- src/Test/Spy.py | 8 +- src/Test/TestContent.py | 20 ++-- src/Test/TestContentUser.py | 2 +- src/Test/TestDb.py | 12 +- src/Test/TestDiff.py | 14 +-- src/Test/TestFileRequest.py | 16 +-- src/Test/TestPeer.py | 12 +- src/Test/TestSite.py | 4 +- src/Test/TestSiteDownload.py | 12 +- src/Test/TestTor.py | 8 +- src/Test/TestTranslate.py | 3 - src/Test/TestUpnpPunch.py | 8 +- src/Test/TestUser.py | 2 +- src/Test/TestWeb.py | 4 +- src/Test/conftest.py | 32 +++--- src/Tor/TorManager.py | 20 ++-- src/Tor/__init__.py | 2 +- src/Translate/Translate.py | 10 +- src/Translate/__init__.py | 2 +- src/Ui/UiRequest.py | 48 ++++---- src/Ui/UiServer.py | 29 ++--- src/Ui/UiWebsocket.py | 64 +++++------ src/Ui/__init__.py | 6 +- src/User/User.py | 5 +- src/User/UserManager.py | 8 +- src/User/__init__.py | 2 +- src/Worker/Worker.py | 9 +- src/Worker/WorkerManager.py | 24 ++-- src/Worker/__init__.py | 4 +- src/lib/cssvendor/cssvendor.py | 24 ++-- src/main.py | 71 ++++++------ src/util/Diff.py | 4 +- src/util/Event.py | 6 +- src/util/Noparallel.py | 56 ++++----- src/util/Platform.py | 2 +- src/util/Pooled.py | 4 +- src/util/QueryJson.py | 2 +- src/util/RateLimit.py | 22 ++-- src/util/SslPatch.py | 4 +- src/util/UpnpPunch.py | 24 ++-- src/util/__init__.py | 6 +- src/util/helper.py | 16 +-- zeronet.py | 30 ++--- 137 files changed, 910 insertions(+), 913 deletions(-) diff --git a/plugins/AnnounceLocal/AnnounceLocalPlugin.py b/plugins/AnnounceLocal/AnnounceLocalPlugin.py index 27b4d38a..0919762a 100644 --- a/plugins/AnnounceLocal/AnnounceLocalPlugin.py +++ b/plugins/AnnounceLocal/AnnounceLocalPlugin.py @@ -4,7 +4,7 @@ import gevent from Plugin import PluginManager from Config import config -import BroadcastServer +from . 
import BroadcastServer @PluginManager.registerTo("SiteAnnouncer") @@ -42,7 +42,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer): if force: # Probably new site added, clean cache self.known_peers = {} - for peer_id, known_peer in self.known_peers.items(): + for peer_id, known_peer in list(self.known_peers.items()): if time.time() - known_peer["found"] > 20 * 60: del(self.known_peers[peer_id]) self.log.debug("Timeout, removing from known_peers: %s" % peer_id) @@ -78,7 +78,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer): def actionSiteListRequest(self, sender, params): back = [] - sites = self.server.sites.values() + sites = list(self.server.sites.values()) # Split adresses to group of 100 to avoid UDP size limit site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)] diff --git a/plugins/AnnounceLocal/Test/TestAnnounce.py b/plugins/AnnounceLocal/Test/TestAnnounce.py index 691ecc26..4def02ed 100644 --- a/plugins/AnnounceLocal/Test/TestAnnounce.py +++ b/plugins/AnnounceLocal/Test/TestAnnounce.py @@ -85,10 +85,10 @@ class TestAnnounce: def testPeerDiscover(self, announcer, announcer_remote, site): assert announcer.server.peer_id != announcer_remote.server.peer_id - assert len(announcer.server.sites.values()[0].peers) == 0 + assert len(list(announcer.server.sites.values())[0].peers) == 0 announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port) time.sleep(0.1) - assert len(announcer.server.sites.values()[0].peers) == 1 + assert len(list(announcer.server.sites.values())[0].peers) == 1 def testRecentPeerList(self, announcer, announcer_remote, site): assert len(site.peers_recent) == 0 @@ -101,13 +101,13 @@ class TestAnnounce: assert len(site.peers) == 1 # It should update peer without siteListResponse - last_time_found = site.peers.values()[0].time_found + last_time_found = list(site.peers.values())[0].time_found site.peers_recent.clear() with Spy.Spy(announcer, "handleMessage") as responses: announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) time.sleep(0.1) assert [response[1]["cmd"] for response in responses] == ["discoverResponse"] assert len(site.peers_recent) == 1 - assert site.peers.values()[0].time_found > last_time_found + assert list(site.peers.values())[0].time_found > last_time_found diff --git a/plugins/AnnounceLocal/__init__.py b/plugins/AnnounceLocal/__init__.py index defe2412..5b80abd2 100644 --- a/plugins/AnnounceLocal/__init__.py +++ b/plugins/AnnounceLocal/__init__.py @@ -1 +1 @@ -import AnnounceLocalPlugin \ No newline at end of file +from . 
import AnnounceLocalPlugin \ No newline at end of file diff --git a/plugins/AnnounceShare/AnnounceSharePlugin.py b/plugins/AnnounceShare/AnnounceSharePlugin.py index 10e3a3e6..8c0a8ec4 100644 --- a/plugins/AnnounceShare/AnnounceSharePlugin.py +++ b/plugins/AnnounceShare/AnnounceSharePlugin.py @@ -75,7 +75,7 @@ class TrackerStorage(object): def getWorkingTrackers(self, type="shared"): trackers = { - key: tracker for key, tracker in self.getTrackers(type).iteritems() + key: tracker for key, tracker in self.getTrackers(type).items() if tracker["time_success"] > time.time() - 60 * 60 } return trackers @@ -95,7 +95,7 @@ class TrackerStorage(object): trackers = self.getTrackers() self.log.debug("Loaded %s shared trackers" % len(trackers)) - for address, tracker in trackers.items(): + for address, tracker in list(trackers.items()): tracker["num_error"] = 0 if not address.startswith("zero://"): del trackers[address] @@ -144,7 +144,7 @@ class SiteAnnouncerPlugin(object): tracker_storage.time_discover = time.time() gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers()) trackers = super(SiteAnnouncerPlugin, self).getTrackers() - shared_trackers = tracker_storage.getTrackers("shared").keys() + shared_trackers = list(tracker_storage.getTrackers("shared").keys()) if shared_trackers: return trackers + shared_trackers else: @@ -164,7 +164,7 @@ class SiteAnnouncerPlugin(object): @PluginManager.registerTo("FileRequest") class FileRequestPlugin(object): def actionGetTrackers(self, params): - shared_trackers = tracker_storage.getWorkingTrackers("shared").keys() + shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys()) self.response({"trackers": shared_trackers}) diff --git a/plugins/AnnounceShare/Test/TestAnnounceShare.py b/plugins/AnnounceShare/Test/TestAnnounceShare.py index 4608eda7..7178eac8 100644 --- a/plugins/AnnounceShare/Test/TestAnnounceShare.py +++ b/plugins/AnnounceShare/Test/TestAnnounceShare.py @@ -12,7 +12,6 @@ class TestAnnounceShare: open("%s/trackers.json" % config.data_dir, "w").write("{}") tracker_storage = AnnounceSharePlugin.tracker_storage tracker_storage.load() - print tracker_storage.file_path, config.data_dir peer = Peer(file_server.ip, 1544, connection_server=file_server) assert peer.request("getTrackers")["trackers"] == [] diff --git a/plugins/AnnounceShare/__init__.py b/plugins/AnnounceShare/__init__.py index f55cb2c6..dc1e40bd 100644 --- a/plugins/AnnounceShare/__init__.py +++ b/plugins/AnnounceShare/__init__.py @@ -1 +1 @@ -import AnnounceSharePlugin +from . 
import AnnounceSharePlugin diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py index b7f9e823..a3f4197a 100644 --- a/plugins/AnnounceZero/AnnounceZeroPlugin.py +++ b/plugins/AnnounceZero/AnnounceZeroPlugin.py @@ -119,7 +119,7 @@ class SiteAnnouncerPlugin(object): onion = self.site.connection_server.tor_manager.getOnion(site.address) publickey = self.site.connection_server.tor_manager.getPublickey(onion) if publickey not in request["onion_signs"]: - sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion)) + sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion)) request["onion_signs"][publickey] = sign res = tracker_peer.request("announce", request) if not res or "onion_sign_this" in res: diff --git a/plugins/AnnounceZero/__init__.py b/plugins/AnnounceZero/__init__.py index 4b9cbe10..8aec5ddb 100644 --- a/plugins/AnnounceZero/__init__.py +++ b/plugins/AnnounceZero/__init__.py @@ -1 +1 @@ -import AnnounceZeroPlugin \ No newline at end of file +from . import AnnounceZeroPlugin \ No newline at end of file diff --git a/plugins/Bigfile/BigfilePiecefield.py b/plugins/Bigfile/BigfilePiecefield.py index c7690279..87170c83 100644 --- a/plugins/Bigfile/BigfilePiecefield.py +++ b/plugins/Bigfile/BigfilePiecefield.py @@ -4,7 +4,7 @@ import array def packPiecefield(data): res = [] if not data: - return array.array("H", "") + return array.array("H", b"") if data[0] == "0": res.append(0) @@ -48,7 +48,7 @@ class BigfilePiecefield(object): __slots__ = ["data"] def __init__(self): - self.data = "" + self.data = b"" def fromstring(self, s): self.data = s @@ -71,7 +71,7 @@ def __setitem__(self, key, value): data = self.data if len(data) < key: - data = data.ljust(key+1, "0") + data = data.ljust(key + 1, "0") data = data[:key] + str(int(value)) + data[key + 1:] self.data = data @@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object): __slots__ = ["data"] def __init__(self): - self.data = "" + self.data = b"" def fromstring(self, data): self.data = packPiecefield(data).tostring() @@ -103,7 +103,7 @@ def __setitem__(self, key, value): data = self.tostring() if len(data) < key: - data = data.ljust(key+1, "0") + data = data.ljust(key + 1, "0") data = data[:key] + str(int(value)) + data[key + 1:] self.fromstring(data) @@ -116,7 +116,7 @@ if __name__ == "__main__": meminfo = psutil.Process(os.getpid()).memory_info for storage in [BigfilePiecefieldPacked, BigfilePiecefield]: - print "-- Testing storage: %s --" % storage + print("-- Testing storage: %s --" % storage) m = meminfo()[0] s = time.time() piecefields = {} @@ -125,34 +125,34 @@ piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:]) piecefields[i] = piecefield - print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)) + print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))) m = meminfo()[0] s = time.time() - for piecefield in piecefields.values(): + for piecefield in list(piecefields.values()): val = piecefield[1000] - print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s) + print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)) m = meminfo()[0] s = time.time() - for piecefield in
piecefields.values(): + for piecefield in list(piecefields.values()): piecefield[1000] = True - print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s) + print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)) m = meminfo()[0] s = time.time() - for piecefield in piecefields.values(): + for piecefield in list(piecefields.values()): packed = piecefield.pack() - print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)) + print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))) m = meminfo()[0] s = time.time() - for piecefield in piecefields.values(): + for piecefield in list(piecefields.values()): piecefield.unpack(packed) - print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)) + print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))) piecefields = {} diff --git a/plugins/Bigfile/BigfilePlugin.py b/plugins/Bigfile/BigfilePlugin.py index 484d2b6d..2757983e 100644 --- a/plugins/Bigfile/BigfilePlugin.py +++ b/plugins/Bigfile/BigfilePlugin.py @@ -5,7 +5,6 @@ import shutil import collections import math -import msgpack import gevent import gevent.lock @@ -15,7 +14,7 @@ from Crypt import CryptHash from lib import merkletools from util import helper import util -from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked +from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked # We can only import plugin host clases after the plugins are loaded @@ -61,7 +60,7 @@ class UiRequestPlugin(object): ) if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split - hash = piecemap_info["sha512_pieces"][0].encode("hex") + hash = piecemap_info["sha512_pieces"][0].hex() hash_id = site.content_manager.hashfield.getHashId(hash) site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True) @@ -178,7 +177,7 @@ class UiWebsocketPlugin(object): self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) try: self.site.storage.delete(piecemap_inner_path) - except Exception, err: + except Exception as err: self.log.error("File %s delete error: %s" % (piecemap_inner_path, err)) return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path) @@ -324,7 +323,7 @@ class ContentManagerPlugin(object): def verifyPiece(self, inner_path, pos, piece): piecemap = self.getPiecemap(inner_path) - piece_i = pos / piecemap["piece_size"] + piece_i = int(pos / piecemap["piece_size"]) if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]: raise VerifyError("Invalid hash") return True @@ -345,7 +344,7 @@ class ContentManagerPlugin(object): file_info = self.getFileInfo(inner_path) # Mark piece downloaded - piece_i = pos_from / file_info["piece_size"] + piece_i = int(pos_from / file_info["piece_size"]) self.site.storage.piecefields[file_info["sha512"]][piece_i] = True # Only add to site size on first request @@ -368,7 +367,7 @@ class ContentManagerPlugin(object): del self.site.storage.piecefields[sha512] # Also remove other pieces of the file from download queue - for key in self.site.bad_files.keys(): + for key in list(self.site.bad_files.keys()): if key.startswith(inner_path + "|"): del self.site.bad_files[key] self.site.worker_manager.removeSolvedFileTasks() @@ -381,9 +380,9 @@ class 
SiteStoragePlugin(object): super(SiteStoragePlugin, self).__init__(*args, **kwargs) self.piecefields = collections.defaultdict(BigfilePiecefield) if "piecefields" in self.site.settings.get("cache", {}): - for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems(): + for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items(): if piecefield_packed: - self.piecefields[sha512].unpack(piecefield_packed.decode("base64")) + self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed)) self.site.settings["cache"]["piecefields"] = {} def createSparseFile(self, inner_path, size, sha512=None): @@ -486,7 +485,7 @@ class BigFile(object): requests = [] # Request all required blocks while 1: - piece_i = pos / self.piece_size + piece_i = int(pos / self.piece_size) if piece_i * self.piece_size >= read_until: break pos_from = piece_i * self.piece_size @@ -503,7 +502,7 @@ class BigFile(object): prebuffer_until = min(self.size, read_until + self.prebuffer) priority = 3 while 1: - piece_i = pos / self.piece_size + piece_i = int(pos / self.piece_size) if piece_i * self.piece_size >= prebuffer_until: break pos_from = piece_i * self.piece_size @@ -565,7 +564,7 @@ class WorkerManagerPlugin(object): inner_path, file_range = inner_path.split("|") pos_from, pos_to = map(int, file_range.split("-")) - task["piece_i"] = pos_from / file_info["piece_size"] + task["piece_i"] = int(pos_from / file_info["piece_size"]) task["sha512"] = file_info["sha512"] else: if inner_path in self.site.bad_files: @@ -601,10 +600,10 @@ class WorkerManagerPlugin(object): class FileRequestPlugin(object): def isReadable(self, site, inner_path, file, pos): # Peek into file - if file.read(10) == "\0" * 10: + if file.read(10) == b"\0" * 10: # Looks empty, but makes sures we don't have that piece file_info = site.content_manager.getFileInfo(inner_path) - piece_i = pos / file_info["piece_size"] + piece_i = int(pos / file_info["piece_size"]) if not site.storage.piecefields[file_info["sha512"]][piece_i]: return False # Seek back to position we want to read @@ -622,7 +621,7 @@ class FileRequestPlugin(object): if not peer.connection: # Just added peer.connect(self.connection) # Assign current connection to peer - piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()} + piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()} self.response({"piecefields_packed": piecefields_packed}) def actionSetPiecefields(self, params): @@ -638,7 +637,7 @@ class FileRequestPlugin(object): peer.connect(self.connection) peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked) - for sha512, piecefield_packed in params["piecefields_packed"].iteritems(): + for sha512, piecefield_packed in params["piecefields_packed"].items(): peer.piecefields[sha512].unpack(piecefield_packed) site.settings["has_bigfile"] = True @@ -673,7 +672,7 @@ class PeerPlugin(object): self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) try: - for sha512, piecefield_packed in res["piecefields_packed"].iteritems(): + for sha512, piecefield_packed in res["piecefields_packed"].items(): self.piecefields[sha512].unpack(piecefield_packed) except Exception as err: self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err)) @@ -720,7 +719,7 @@ class SitePlugin(object): def getSettingsCache(self): back = super(SitePlugin, self).getSettingsCache() if self.storage.piecefields: - 
back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()} + back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()} return back def needFile(self, inner_path, *args, **kwargs): diff --git a/plugins/Bigfile/Test/TestBigfile.py b/plugins/Bigfile/Test/TestBigfile.py index 2b71ec8e..d6c057bf 100644 --- a/plugins/Bigfile/Test/TestBigfile.py +++ b/plugins/Bigfile/Test/TestBigfile.py @@ -1,5 +1,5 @@ import time -from cStringIO import StringIO +import io import pytest import msgpack @@ -40,7 +40,7 @@ class TestBigfile: piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"] assert len(piecemap["sha512_pieces"]) == 10 assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1] - assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" + assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" def testVerifyPiece(self, site): inner_path = self.createBigfile(site) @@ -48,7 +48,7 @@ class TestBigfile: # Verify all 10 piece f = site.storage.open(inner_path, "rb") for i in range(10): - piece = StringIO(f.read(1024 * 1024)) + piece = io.BytesIO(f.read(1024 * 1024)) piece.seek(0) site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) f.close() @@ -57,7 +57,7 @@ class TestBigfile: with pytest.raises(VerifyError) as err: i = 1 f = site.storage.open(inner_path, "rb") - piece = StringIO(f.read(1024 * 1024)) + piece = io.BytesIO(f.read(1024 * 1024)) f.close() site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) assert "Invalid hash" in str(err) @@ -70,19 +70,19 @@ class TestBigfile: # Write to file beginning s = time.time() - f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024) + f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024) time_write_start = time.time() - s # Write to file end s = time.time() - f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024) + f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024) time_write_end = time.time() - s # Verify writes f = site.storage.open(inner_path) - assert f.read(10) == "hellostart" + assert f.read(10) == b"hellostart" f.seek(99 * 1024 * 1024) - assert f.read(8) == "helloend" + assert f.read(8) == b"helloend" f.close() site.storage.delete(inner_path) @@ -105,7 +105,7 @@ class TestBigfile: buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size - assert buff.getvalue().startswith("Test524") # Correct data + assert buff.getvalue().startswith(b"Test524") # Correct data buff.seek(0) assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash @@ -147,12 +147,12 @@ class TestBigfile: # Verify 0. block not downloaded f = site_temp.storage.open(inner_path) - assert f.read(10) == "\0" * 10 + assert f.read(10) == b"\0" * 10 # Verify 5. and 10. 
block downloaded f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" + assert f.read(7) == b"Test524" f.seek(9 * 1024 * 1024) - assert f.read(7) == "943---T" + assert f.read(7) == b"943---T" # Verify hashfield assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack @@ -178,14 +178,14 @@ class TestBigfile: with site_temp.storage.openBigfile(inner_path) as f: with Spy.Spy(FileRequest, "route") as requests: f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" + assert f.read(7) == b"Test524" f.seek(9 * 1024 * 1024) - assert f.read(7) == "943---T" + assert f.read(7) == b"943---T" assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces - assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) + assert set(site_temp.content_manager.hashfield) == set([18343, 43727]) assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001" assert f.sha512 in site_temp.getSettingsCache()["piecefields"] @@ -193,7 +193,7 @@ class TestBigfile: # Test requesting already downloaded with Spy.Spy(FileRequest, "route") as requests: f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" + assert f.read(7) == b"Test524" assert len(requests) == 0 @@ -201,9 +201,9 @@ class TestBigfile: with Spy.Spy(FileRequest, "route") as requests: f.seek(5 * 1024 * 1024) # We already have this block data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. block - assert data.startswith("Test524") - assert data.endswith("Test838-") - assert "\0" not in data # No null bytes allowed + assert data.startswith(b"Test524") + assert data.endswith(b"Test838-") + assert b"\0" not in data # No null bytes allowed assert len(requests) == 2 # Two block download @@ -258,11 +258,11 @@ class TestBigfile: # Download second block with site_temp.storage.openBigfile(inner_path) as f: f.seek(1024 * 1024) - assert f.read(1024)[0] != "\0" + assert f.read(1024)[0:1] != b"\0" # Make sure first block not download with site_temp.storage.open(inner_path) as f: - assert f.read(1024)[0] == "\0" + assert f.read(1024)[0:1] == b"\0" peer2 = site.addPeer(file_server.ip, 1545, return_peer=True) @@ -284,8 +284,8 @@ class TestBigfile: s = time.time() for i in range(25000): site.addPeer(file_server.ip, i) - print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB - print site.peers.values()[0].piecefields + print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB + print(list(site.peers.values())[0].piecefields) def testUpdatePiecefield(self, file_server, site, site_temp): inner_path = self.createBigfile(site) @@ -390,16 +390,16 @@ class TestBigfile: size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"] with site_temp.storage.openBigfile(inner_path) as f: - assert "\0" not in f.read(1024) + assert b"\0" not in f.read(1024) assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile with site_temp.storage.openBigfile(inner_path) as f: # Don't count twice - assert "\0" not in f.read(1024) + assert b"\0" not in f.read(1024) assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile # Add second block - assert "\0" not in f.read(1024 * 1024) + assert b"\0" not in f.read(1024 * 1024) assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile def testPrebuffer(self, file_server, site, site_temp): @@ -423,7 +423,7 @@ class TestBigfile: with 
site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f: with Spy.Spy(FileRequest, "route") as requests: f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" + assert f.read(7) == b"Test524" # assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2 @@ -434,7 +434,7 @@ class TestBigfile: # No prebuffer beyond end of the file f.seek(9 * 1024 * 1024) - assert "\0" not in f.read(7) + assert b"\0" not in f.read(7) assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0 diff --git a/plugins/Bigfile/__init__.py b/plugins/Bigfile/__init__.py index 005d6661..cf2dcb49 100644 --- a/plugins/Bigfile/__init__.py +++ b/plugins/Bigfile/__init__.py @@ -1,2 +1,2 @@ -import BigfilePlugin -from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked \ No newline at end of file +from . import BigfilePlugin +from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked \ No newline at end of file diff --git a/plugins/Chart/ChartCollector.py b/plugins/Chart/ChartCollector.py index 471c4b91..ad4d11a8 100644 --- a/plugins/Chart/ChartCollector.py +++ b/plugins/Chart/ChartCollector.py @@ -29,7 +29,7 @@ class ChartCollector(object): sites = file_server.sites if not sites: return collectors - content_db = sites.values()[0].content_manager.contents.db + content_db = list(sites.values())[0].content_manager.contents.db # Connection stats collectors["connection"] = lambda: len(file_server.connections) @@ -67,8 +67,8 @@ class ChartCollector(object): collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()]) # Peers - collectors["peer"] = lambda (peers): len(peers) - collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer]) + collectors["peer"] = lambda peers: len(peers) + collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer]) # Size collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()]) @@ -81,21 +81,21 @@ class ChartCollector(object): site_collectors = {} # Size - site_collectors["site_size"] = lambda(site): site.settings.get("size", 0) - site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0) - site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0) - site_collectors["site_content"] = lambda(site): len(site.content_manager.contents) + site_collectors["site_size"] = lambda site: site.settings.get("size", 0) + site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0) + site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0) + site_collectors["site_content"] = lambda site: len(site.content_manager.contents) # Data transfer - site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0) - site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0) + site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0) + site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0) # Peers - site_collectors["site_peer"] = lambda(site): len(site.peers) - site_collectors["site_peer_onion"] = lambda(site): len( - [True for peer in site.peers.itervalues() if 
peer.ip.endswith(".onion")] + site_collectors["site_peer"] = lambda site: len(site.peers) + site_collectors["site_peer_onion"] = lambda site: len( + [True for peer in site.peers.values() if peer.ip.endswith(".onion")] ) - site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection]) + site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection]) return site_collectors @@ -109,7 +109,7 @@ class ChartCollector(object): if site is None: peers = self.getUniquePeers() datas = {} - for key, collector in collectors.iteritems(): + for key, collector in collectors.items(): try: if site: value = collector(site) @@ -138,7 +138,7 @@ class ChartCollector(object): s = time.time() datas = self.collectDatas(collectors, last_values["global"]) values = [] - for key, value in datas.iteritems(): + for key, value in datas.items(): values.append((self.db.getTypeId(key), value, now)) self.log.debug("Global collectors done in %.3fs" % (time.time() - s)) @@ -154,9 +154,9 @@ class ChartCollector(object): now = int(time.time()) s = time.time() values = [] - for address, site in sites.iteritems(): + for address, site in sites.items(): site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site) - for key, value in site_datas.iteritems(): + for key, value in site_datas.items(): values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now)) time.sleep(0.000001) self.log.debug("Site collections done in %.3fs" % (time.time() - s)) diff --git a/plugins/Chart/ChartPlugin.py b/plugins/Chart/ChartPlugin.py index a491618b..ddc1e609 100644 --- a/plugins/Chart/ChartPlugin.py +++ b/plugins/Chart/ChartPlugin.py @@ -6,8 +6,8 @@ import gevent from Config import config from util import helper from Plugin import PluginManager -from ChartDb import ChartDb -from ChartCollector import ChartCollector +from .ChartDb import ChartDb +from .ChartCollector import ChartCollector if "db" not in locals().keys(): # Share on reloads db = ChartDb() @@ -39,7 +39,7 @@ class UiWebsocketPlugin(object): if not query.strip().upper().startswith("SELECT"): raise Exception("Only SELECT query supported") res = db.execute(query, params) - except Exception, err: # Response the error to client + except Exception as err: # Response the error to client self.log.error("ChartDbQuery error: %s" % err) return {"error": str(err)} # Convert result to dict diff --git a/plugins/Chart/__init__.py b/plugins/Chart/__init__.py index 78981122..2c284609 100644 --- a/plugins/Chart/__init__.py +++ b/plugins/Chart/__init__.py @@ -1 +1 @@ -import ChartPlugin \ No newline at end of file +from . 
import ChartPlugin \ No newline at end of file diff --git a/plugins/ContentFilter/ContentFilterPlugin.py b/plugins/ContentFilter/ContentFilterPlugin.py index 4c30a140..f6d74e7a 100644 --- a/plugins/ContentFilter/ContentFilterPlugin.py +++ b/plugins/ContentFilter/ContentFilterPlugin.py @@ -1,13 +1,13 @@ import time import re -import cgi +import html import hashlib from Plugin import PluginManager from Translate import Translate from Config import config -from ContentFilterStorage import ContentFilterStorage +from .ContentFilterStorage import ContentFilterStorage if "_" not in locals(): @@ -39,8 +39,8 @@ class UiWebsocketPlugin(object): else: self.cmd( "confirm", - [_["Hide all content from %s?"] % cgi.escape(cert_user_id), _["Mute"]], - lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason) + [_["Hide all content from %s?"] % html.escape(cert_user_id), _["Mute"]], + lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason) ) def cbMuteRemove(self, to, auth_address): @@ -55,8 +55,8 @@ class UiWebsocketPlugin(object): else: self.cmd( "confirm", - [_["Unmute %s?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]], - lambda (res): self.cbMuteRemove(to, auth_address) + [_["Unmute %s?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]], + lambda res: self.cbMuteRemove(to, auth_address) ) def actionMuteList(self, to): @@ -101,13 +101,13 @@ class UiWebsocketPlugin(object): else: content = site.storage.loadJson(inner_path) title = _["New shared global content filter: %s (%s sites, %s users)"] % ( - cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {})) + html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {})) ) self.cmd( "confirm", [title, "Add"], - lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description) + lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description) ) def cbFilterIncludeAdd(self, to, res, address, inner_path, description): @@ -189,7 +189,7 @@ class UiRequestPlugin(object): address = self.server.site_manager.resolveDomain(address) if address: - address_sha256 = "0x" + hashlib.sha256(address).hexdigest() + address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest() else: address_sha256 = None diff --git a/plugins/ContentFilter/ContentFilterStorage.py b/plugins/ContentFilter/ContentFilterStorage.py index 17af298f..84908e09 100644 --- a/plugins/ContentFilter/ContentFilterStorage.py +++ b/plugins/ContentFilter/ContentFilterStorage.py @@ -62,7 +62,7 @@ class ContentFilterStorage(object): ) continue - for key, val in content.iteritems(): + for key, val in content.items(): if type(val) is not dict: continue diff --git a/plugins/ContentFilter/__init__.py b/plugins/ContentFilter/__init__.py index 4d8c3acc..2cbca8ee 100644 --- a/plugins/ContentFilter/__init__.py +++ b/plugins/ContentFilter/__init__.py @@ -1 +1 @@ -import ContentFilterPlugin +from . 
import ContentFilterPlugin diff --git a/plugins/Cors/CorsPlugin.py b/plugins/Cors/CorsPlugin.py index 8d758988..af501462 100644 --- a/plugins/Cors/CorsPlugin.py +++ b/plugins/Cors/CorsPlugin.py @@ -1,5 +1,5 @@ import re -import cgi +import html import copy from Plugin import PluginManager @@ -78,8 +78,8 @@ class UiWebsocketPlugin(object): self.cmd( "confirm", - [_["This site requests read permission to: %s"] % cgi.escape(site_name), button_title], - lambda (res): self.cbCorsPermission(to, address) + [_["This site requests read permission to: %s"] % html.escape(site_name), button_title], + lambda res: self.cbCorsPermission(to, address) ) def cbCorsPermission(self, to, address): diff --git a/plugins/Cors/__init__.py b/plugins/Cors/__init__.py index bca1ab3e..bcaa502b 100644 --- a/plugins/Cors/__init__.py +++ b/plugins/Cors/__init__.py @@ -1 +1 @@ -import CorsPlugin \ No newline at end of file +from . import CorsPlugin \ No newline at end of file diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py index 955dd9b1..88441e44 100644 --- a/plugins/CryptMessage/CryptMessage.py +++ b/plugins/CryptMessage/CryptMessage.py @@ -43,11 +43,11 @@ def getEcc(privatekey=None): def toOpensslPrivatekey(privatekey): privatekey_bin = btctools.encode_privkey(privatekey, "bin") - return '\x02\xca\x00\x20' + privatekey_bin + return b'\x02\xca\x00\x20' + privatekey_bin def toOpensslPublickey(publickey): publickey_bin = btctools.encode_pubkey(publickey, "bin") publickey_bin = publickey_bin[1:] - publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:] + publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:] return publickey_openssl diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py index 71499eca..e37e4c17 100644 --- a/plugins/CryptMessage/CryptMessagePlugin.py +++ b/plugins/CryptMessage/CryptMessagePlugin.py @@ -3,9 +3,9 @@ import os from Plugin import PluginManager from Crypt import CryptBitcoin -from lib.pybitcointools import bitcoin as btctools +import lib.pybitcointools as btctools -import CryptMessage +from . import CryptMessage @PluginManager.registerTo("UiWebsocket") diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py index 3eb41820..6aeb4e52 100644 --- a/plugins/CryptMessage/__init__.py +++ b/plugins/CryptMessage/__init__.py @@ -1 +1 @@ -import CryptMessagePlugin \ No newline at end of file +from . import CryptMessagePlugin \ No newline at end of file diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py index 8d662bba..bbaf0d29 100644 --- a/plugins/FilePack/FilePackPlugin.py +++ b/plugins/FilePack/FilePackPlugin.py @@ -48,7 +48,7 @@ class UiRequestPlugin(object): if ".zip/" in path or ".tar.gz/" in path: file_obj = None path_parts = self.parsePath(path) - file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8")) + file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"]) match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path) archive_path, path_within = match.groups() if archive_path not in archive_cache: diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py index ab07a1ff..660a0920 100644 --- a/plugins/FilePack/__init__.py +++ b/plugins/FilePack/__init__.py @@ -1 +1 @@ -import FilePackPlugin \ No newline at end of file +from . 
import FilePackPlugin \ No newline at end of file diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py index 3de92a91..36c1dbae 100644 --- a/plugins/MergerSite/MergerSitePlugin.py +++ b/plugins/MergerSite/MergerSitePlugin.py @@ -66,7 +66,7 @@ class UiWebsocketPlugin(object): self.cmd( "confirm", [_["Add %s new site?"] % len(addresses), "Add"], - lambda (res): self.cbMergerSiteAdd(to, addresses) + lambda res: self.cbMergerSiteAdd(to, addresses) ) self.response(to, "ok") @@ -102,7 +102,7 @@ class UiWebsocketPlugin(object): ret = {} if not merger_types: return self.response(to, {"error": "Not a merger site"}) - for address, merged_type in merged_db.iteritems(): + for address, merged_type in merged_db.items(): if merged_type not in merger_types: continue # Site not for us if query_site_info: @@ -215,7 +215,7 @@ class UiWebsocketPlugin(object): if not re.match("^[A-Za-z0-9-]+$", merger_type): raise Exception("Invalid merger_type: %s" % merger_type) merged_sites = [] - for address, merged_type in merged_db.iteritems(): + for address, merged_type in merged_db.items(): if merged_type != merger_type: continue site = self.server.sites.get(address) @@ -253,18 +253,18 @@ class SiteStoragePlugin(object): # Not a merger site, that's all if not merger_types: - raise StopIteration + return merged_sites = [ site_manager.sites[address] - for address, merged_type in merged_db.iteritems() + for address, merged_type in merged_db.items() if merged_type in merger_types ] found = 0 for merged_site in merged_sites: self.log.debug("Loading merged site: %s" % merged_site) merged_type = merged_db[merged_site.address] - for content_inner_path, content in merged_site.content_manager.contents.iteritems(): + for content_inner_path, content in merged_site.content_manager.contents.items(): # content.json file itself if merged_site.storage.isFile(content_inner_path): # Missing content.json file merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path) @@ -273,7 +273,7 @@ class SiteStoragePlugin(object): merged_site.log.error("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): if not file_relative_path.endswith(".json"): continue # We only interesed in json files file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir @@ -285,7 +285,7 @@ class SiteStoragePlugin(object): merged_site.log.error("[MISSING] %s" % file_inner_path) found += 1 if found % 100 == 0: - time.sleep(0.000001) # Context switch to avoid UI block + time.sleep(0.001) # Context switch to avoid UI block # Also notice merger sites on a merged site file change def onUpdated(self, inner_path, file=None): @@ -339,11 +339,11 @@ class SiteManagerPlugin(object): site_manager = self if not self.sites: return - for site in self.sites.itervalues(): + for site in self.sites.values(): # Update merged sites try: merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type") - except Exception, err: + except Exception as err: self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err))) continue if merged_type: @@ -368,7 +368,7 @@ class SiteManagerPlugin(object): # Update merged to merger 
if merged_type: - for merger_site in self.sites.itervalues(): + for merger_site in self.sites.values(): if "Merger:" + merged_type in merger_site.settings["permissions"]: if site.address not in merged_to_merger: merged_to_merger[site.address] = [] diff --git a/plugins/MergerSite/__init__.py b/plugins/MergerSite/__init__.py index f1f3412c..2cf54611 100644 --- a/plugins/MergerSite/__init__.py +++ b/plugins/MergerSite/__init__.py @@ -1 +1 @@ -import MergerSitePlugin \ No newline at end of file +from . import MergerSitePlugin \ No newline at end of file diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py index 802fa50b..8d040127 100644 --- a/plugins/Newsfeed/NewsfeedPlugin.py +++ b/plugins/Newsfeed/NewsfeedPlugin.py @@ -37,7 +37,7 @@ class UiWebsocketPlugin(object): total_s = time.time() num_sites = 0 - for address, site_data in self.user.sites.items(): + for address, site_data in list(self.user.sites.items()): feeds = site_data.get("follow") if not feeds: continue @@ -45,7 +45,7 @@ class UiWebsocketPlugin(object): self.log.debug("Invalid feed for site %s" % address) continue num_sites += 1 - for name, query_set in feeds.iteritems(): + for name, query_set in feeds.items(): site = SiteManager.site_manager.get(address) if not site or not site.storage.has_db: continue @@ -78,7 +78,7 @@ class UiWebsocketPlugin(object): for row in res: row = dict(row) - if not isinstance(row["date_added"], (int, long, float, complex)): + if not isinstance(row["date_added"], (int, float, complex)): self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"])) continue if row["date_added"] > 1000000000000: # Formatted as millseconds @@ -116,7 +116,7 @@ class UiWebsocketPlugin(object): search_text, filters = self.parseSearch(search) - for address, site in SiteManager.site_manager.list().iteritems(): + for address, site in SiteManager.site_manager.list().items(): if not site.storage.has_db: continue @@ -137,7 +137,7 @@ class UiWebsocketPlugin(object): num_sites += 1 - for name, query in feeds.iteritems(): + for name, query in feeds.items(): s = time.time() try: db_query = DbQuery(query) @@ -162,7 +162,7 @@ class UiWebsocketPlugin(object): db_query.parts["LIMIT"] = str(limit) res = site.storage.query(str(db_query), params) - except Exception, err: + except Exception as err: self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query}) continue diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py index 20cc04a1..6e624df6 100644 --- a/plugins/Newsfeed/__init__.py +++ b/plugins/Newsfeed/__init__.py @@ -1 +1 @@ -import NewsfeedPlugin \ No newline at end of file +from . 
import NewsfeedPlugin \ No newline at end of file diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py index 1a1f10af..f3716b44 100644 --- a/plugins/OptionalManager/ContentDbPlugin.py +++ b/plugins/OptionalManager/ContentDbPlugin.py @@ -88,8 +88,8 @@ class ContentDbPlugin(object): site_sizes[row["site_id"]]["optional_downloaded"] += row["size"] # Site site size stats to sites.json settings - site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()} - for site_id, stats in site_sizes.iteritems(): + site_ids_reverse = {val: key for key, val in self.site_ids.items()} + for site_id, stats in site_sizes.items(): site_address = site_ids_reverse.get(site_id) if not site_address: self.log.error("Not found site_id: %s" % site_id) @@ -166,7 +166,7 @@ class ContentDbPlugin(object): num = 0 site_id = self.site_ids[site.address] content_inner_dir = helper.getDirname(content_inner_path) - for relative_inner_path, file in content.get("files_optional", {}).iteritems(): + for relative_inner_path, file in content.get("files_optional", {}).items(): file_inner_path = content_inner_dir + relative_inner_path hash_id = int(file["sha512"][0:4], 16) if hash_id in site.content_manager.hashfield: @@ -232,14 +232,14 @@ class ContentDbPlugin(object): num_file = 0 num_updated = 0 num_site = 0 - for site in self.sites.values(): + for site in list(self.sites.values()): if not site.content_manager.has_optional_files: continue if not site.settings["serving"]: continue has_updated_hashfield = next(( peer - for peer in site.peers.itervalues() + for peer in site.peers.values() if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated ), None) @@ -248,7 +248,7 @@ class ContentDbPlugin(object): hashfield_peers = itertools.chain.from_iterable( peer.hashfield.storage - for peer in site.peers.itervalues() + for peer in site.peers.values() if peer.has_hashfield ) peer_nums = collections.Counter( @@ -270,7 +270,7 @@ class ContentDbPlugin(object): updates[row["file_id"]] = peer_num self.execute("BEGIN") - for file_id, peer_num in updates.iteritems(): + for file_id, peer_num in updates.items(): self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id)) self.execute("END") @@ -394,7 +394,7 @@ class ContentDbPlugin(object): self.updatePeerNumbers() - site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()} + site_ids_reverse = {val: key for key, val in self.site_ids.items()} deleted_file_ids = [] for row in self.queryDeletableFiles(): site_address = site_ids_reverse.get(row["site_id"]) diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py index 9da93041..4e1b4336 100644 --- a/plugins/OptionalManager/OptionalManagerPlugin.py +++ b/plugins/OptionalManager/OptionalManagerPlugin.py @@ -6,7 +6,7 @@ import gevent from util import helper from Plugin import PluginManager -import ContentDbPlugin +from . import ContentDbPlugin # We can only import plugin host clases after the plugins are loaded @@ -24,7 +24,7 @@ def processAccessLog(): for site_id in access_log: content_db.execute( "UPDATE file_optional SET time_accessed = %s WHERE ?" 
% now, - {"site_id": site_id, "inner_path": access_log[site_id].keys()} + {"site_id": site_id, "inner_path": list(access_log[site_id].keys())} ) num += len(access_log[site_id]) access_log.clear() @@ -37,7 +37,7 @@ def processRequestLog(): num = 0 cur.execute("BEGIN") for site_id in request_log: - for inner_path, uploaded in request_log[site_id].iteritems(): + for inner_path, uploaded in request_log[site_id].items(): content_db.execute( "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded, {"site_id": site_id, "inner_path": inner_path} @@ -101,7 +101,7 @@ class ContentManagerPlugin(object): {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id} ) row = res.fetchone() - if row and row[0]: + if row and row["is_downloaded"]: return True else: return False @@ -191,7 +191,7 @@ class SitePlugin(object): if is_downloadable: return is_downloadable - for path in self.settings.get("optional_help", {}).iterkeys(): + for path in self.settings.get("optional_help", {}).keys(): if inner_path.startswith(path): return True diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py index 00a5fcb7..4325cb2c 100644 --- a/plugins/OptionalManager/Test/TestOptionalManager.py +++ b/plugins/OptionalManager/Test/TestOptionalManager.py @@ -1,15 +1,7 @@ -import hashlib -import os import copy -import json -from cStringIO import StringIO import pytest -from OptionalManager import OptionalManagerPlugin -from util import helper -from Crypt import CryptBitcoin - @pytest.mark.usefixtures("resetSettings") class TestOptionalManager: @@ -58,7 +50,7 @@ class TestOptionalManager: assert not file_row["is_downloaded"] # Write file from outside of ZeroNet - site.storage.open("testfile", "wb").write("A" * 1234) # For quick check hash does not matter only file size + site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size hashfield_len_before = len(site.content_manager.hashfield) site.storage.verifyFiles(quick_check=True) @@ -92,8 +84,8 @@ class TestOptionalManager: assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd") # Write files from outside of ZeroNet (For quick check hash does not matter only file size) - site.storage.open("testfile1", "wb").write("A" * 1234) - site.storage.open("testfile2", "wb").write("B" * 2345) + site.storage.open("testfile1", "wb").write(b"A" * 1234) + site.storage.open("testfile2", "wb").write(b"B" * 2345) site.storage.verifyFiles(quick_check=True) @@ -129,7 +121,6 @@ class TestOptionalManager: assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1 def testOptionalDelete(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" contents = site.content_manager.contents site.content_manager.setPin("data/img/zerotalk-upvote.png", True) diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py index 94d3f501..efdfdf9d 100644 --- a/plugins/OptionalManager/UiWebsocketPlugin.py +++ b/plugins/OptionalManager/UiWebsocketPlugin.py @@ -1,6 +1,6 @@ import re import time -import cgi +import html import gevent @@ -28,7 +28,7 @@ class UiWebsocketPlugin(object): content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time() if len(content_db.my_optional_files) > 50: # Keep only last 50 oldest_key = min( - content_db.my_optional_files.iterkeys(), + iter(content_db.my_optional_files.keys()), 
key=(lambda key: content_db.my_optional_files[key]) ) del content_db.my_optional_files[oldest_key] @@ -80,7 +80,7 @@ class UiWebsocketPlugin(object): # Add leech / seed stats row["peer_seed"] = 0 row["peer_leech"] = 0 - for peer in site.peers.itervalues(): + for peer in site.peers.values(): if not peer.time_piecefields_updated or sha512 not in peer.piecefields: continue peer_piecefield = peer.piecefields[sha512].tostring() @@ -212,7 +212,7 @@ class UiWebsocketPlugin(object): num_file = len(inner_path) if back == "ok": if num_file == 1: - self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000]) + self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000]) else: self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000]) self.response(to, back) @@ -224,7 +224,7 @@ class UiWebsocketPlugin(object): num_file = len(inner_path) if back == "ok": if num_file == 1: - self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000]) + self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000]) else: self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000]) self.response(to, back) @@ -325,7 +325,7 @@ class UiWebsocketPlugin(object): self.cmd("notification", [ "done", _["You started to help distribute %s.
Directory: %s"] % - (cgi.escape(title), cgi.escape(directory)), + (html.escape(title), html.escape(directory)), 10000 ]) @@ -369,10 +369,10 @@ class UiWebsocketPlugin(object): self.cmd( "confirm", [ - _["Help distribute all new optional files on site %s"] % cgi.escape(site_title), + _["Help distribute all new optional files on site %s"] % html.escape(site_title), _["Yes, I want to help!"] ], - lambda (res): self.cbOptionalHelpAll(to, site, True) + lambda res: self.cbOptionalHelpAll(to, site, True) ) else: site.settings["autodownloadoptional"] = False diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py index 02969bba..1f0ad2dd 100644 --- a/plugins/OptionalManager/__init__.py +++ b/plugins/OptionalManager/__init__.py @@ -1 +1 @@ -import OptionalManagerPlugin \ No newline at end of file +from . import OptionalManagerPlugin \ No newline at end of file diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py index 241b5c58..86613fc7 100644 --- a/plugins/PeerDb/PeerDbPlugin.py +++ b/plugins/PeerDb/PeerDbPlugin.py @@ -96,8 +96,8 @@ class ContentDbPlugin(object): gevent.spawn_later(60*60, self.savePeers, site, spawn=True) def saveAllPeers(self): - for site in self.sites.values(): + for site in list(self.sites.values()): try: self.savePeers(site) - except Exception, err: + except Exception as err: site.log.error("Save peer error: %s" % err) diff --git a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py index 967561dc..bc8c93b9 100644 --- a/plugins/PeerDb/__init__.py +++ b/plugins/PeerDb/__init__.py @@ -1,2 +1,2 @@ -import PeerDbPlugin +from . import PeerDbPlugin diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py index c56a2cb4..039c32b3 100644 --- a/plugins/Sidebar/SidebarPlugin.py +++ b/plugins/Sidebar/SidebarPlugin.py @@ -1,14 +1,11 @@ import re import os -import cgi +import html import sys import math import time import json -try: - import cStringIO as StringIO -except: - import StringIO +import io import gevent @@ -17,7 +14,7 @@ from Plugin import PluginManager from Debug import Debug from Translate import Translate from util import helper -from ZipStream import ZipStream +from .ZipStream import ZipStream plugin_dir = "plugins/Sidebar" media_dir = plugin_dir + "/media" @@ -46,7 +43,7 @@ class UiRequestPlugin(object): from Debug import DebugMedia DebugMedia.merge(plugin_media_file) if ext == "js": - yield _.translateData(open(plugin_media_file).read()) + yield _.translateData(open(plugin_media_file).read()).encode("utf8") else: for part in self.actionFile(plugin_media_file, send_header=False): yield part @@ -84,15 +81,13 @@ class UiRequestPlugin(object): yield data - - @PluginManager.registerTo("UiWebsocket") class UiWebsocketPlugin(object): def sidebarRenderPeerStats(self, body, site): - connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]) - connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")]) - onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id]) - local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)]) + connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected]) + connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")]) + onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id]) + local = len([peer for peer in list(site.peers.values()) if 
helper.isPrivateIp(peer.ip)]) peers_total = len(site.peers) # Add myself @@ -111,7 +106,7 @@ class UiWebsocketPlugin(object): percent_connectable = percent_connected = percent_onion = 0 if local: - local_html = _(u"
  • {_[Local]}:{local}
  • ") + local_html = _("
  • {_[Local]}:{local}
  • ") else: local_html = "" @@ -122,7 +117,7 @@ class UiWebsocketPlugin(object): ",".join(peer_ips) ) - body.append(_(u""" + body.append(_("""
  • ") @@ -272,9 +267,9 @@ class UiWebsocketPlugin(object): size_limit = site.getSizeLimit() percent_used = size / size_limit - body.append(_(u""" + body.append(_("""
  • - + MB {_[Set]}
  • @@ -292,7 +287,7 @@ class UiWebsocketPlugin(object): size_formatted_total = size_total / 1024 / 1024 size_formatted_downloaded = size_downloaded / 1024 / 1024 - body.append(_(u""" + body.append(_("""