Change to Python3 coding style

pull/1925/head
shortcutme 4 years ago
parent fc0fe0557b
commit b0b9a4d33c
No known key found for this signature in database
GPG Key ID: 5B63BAE6CB9613AE
  1. plugins/AnnounceLocal/AnnounceLocalPlugin.py (6 lines changed)
  2. plugins/AnnounceLocal/Test/TestAnnounce.py (8 lines changed)
  3. plugins/AnnounceLocal/__init__.py (2 lines changed)
  4. plugins/AnnounceShare/AnnounceSharePlugin.py (8 lines changed)
  5. plugins/AnnounceShare/Test/TestAnnounceShare.py (1 line changed)
  6. plugins/AnnounceShare/__init__.py (2 lines changed)
  7. plugins/AnnounceZero/AnnounceZeroPlugin.py (2 lines changed)
  8. plugins/AnnounceZero/__init__.py (2 lines changed)
  9. plugins/Bigfile/BigfilePiecefield.py (30 lines changed)
  10. plugins/Bigfile/BigfilePlugin.py (35 lines changed)
  11. plugins/Bigfile/Test/TestBigfile.py (56 lines changed)
  12. plugins/Bigfile/__init__.py (4 lines changed)
  13. plugins/Chart/ChartCollector.py (34 lines changed)
  14. plugins/Chart/ChartPlugin.py (6 lines changed)
  15. plugins/Chart/__init__.py (2 lines changed)
  16. plugins/ContentFilter/ContentFilterPlugin.py (18 lines changed)
  17. plugins/ContentFilter/ContentFilterStorage.py (2 lines changed)
  18. plugins/ContentFilter/__init__.py (2 lines changed)
  19. plugins/Cors/CorsPlugin.py (6 lines changed)
  20. plugins/Cors/__init__.py (2 lines changed)
  21. plugins/CryptMessage/CryptMessage.py (4 lines changed)
  22. plugins/CryptMessage/CryptMessagePlugin.py (4 lines changed)
  23. plugins/CryptMessage/__init__.py (2 lines changed)
  24. plugins/FilePack/FilePackPlugin.py (2 lines changed)
  25. plugins/FilePack/__init__.py (2 lines changed)
  26. plugins/MergerSite/MergerSitePlugin.py (22 lines changed)
  27. plugins/MergerSite/__init__.py (2 lines changed)
  28. plugins/Newsfeed/NewsfeedPlugin.py (12 lines changed)
  29. plugins/Newsfeed/__init__.py (2 lines changed)
  30. plugins/OptionalManager/ContentDbPlugin.py (16 lines changed)
  31. plugins/OptionalManager/OptionalManagerPlugin.py (10 lines changed)
  32. plugins/OptionalManager/Test/TestOptionalManager.py (15 lines changed)
  33. plugins/OptionalManager/UiWebsocketPlugin.py (16 lines changed)
  34. plugins/OptionalManager/__init__.py (2 lines changed)
  35. plugins/PeerDb/PeerDbPlugin.py (4 lines changed)
  36. plugins/PeerDb/__init__.py (2 lines changed)
  37. plugins/Sidebar/SidebarPlugin.py (106 lines changed)
  38. plugins/Sidebar/ZipStream.py (8 lines changed)
  39. plugins/Sidebar/__init__.py (2 lines changed)
  40. plugins/Stats/__init__.py (2 lines changed)
  41. plugins/TranslateSite/TranslateSitePlugin.py (16 lines changed)
  42. plugins/TranslateSite/__init__.py (2 lines changed)
  43. plugins/Trayicon/TrayiconPlugin.py (6 lines changed)
  44. plugins/Trayicon/__init__.py (2 lines changed)
  45. plugins/Trayicon/languages/es.json (2 lines changed)
  46. plugins/Trayicon/lib/notificationicon.py (70 lines changed)
  47. plugins/Trayicon/lib/winfolders.py (7 lines changed)
  48. plugins/UiConfig/UiConfigPlugin.py (7 lines changed)
  49. plugins/UiConfig/__init__.py (2 lines changed)
  50. plugins/disabled-Bootstrapper/BootstrapperPlugin.py (6 lines changed)
  51. plugins/disabled-Bootstrapper/Test/TestBootstrapper.py (2 lines changed)
  52. plugins/disabled-Bootstrapper/__init__.py (2 lines changed)
  53. plugins/disabled-Dnschain/SiteManagerPlugin.py (4 lines changed)
  54. plugins/disabled-DonationMessage/__init__.py (2 lines changed)
  55. plugins/disabled-Multiuser/MultiuserPlugin.py (6 lines changed)
  56. plugins/disabled-Multiuser/__init__.py (2 lines changed)
  57. plugins/disabled-StemPort/StemPortPlugin.py (22 lines changed)
  58. plugins/disabled-StemPort/__init__.py (20 lines changed)
  59. plugins/disabled-UiPassword/__init__.py (2 lines changed)
  60. plugins/disabled-Zeroname-local/SiteManagerPlugin.py (2 lines changed)
  61. plugins/disabled-Zeroname-local/__init__.py (4 lines changed)
  62. plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py (10 lines changed)
  63. plugins/disabled-Zeroname-local/domainLookup.py (2 lines changed)
  64. src/Config.py (10 lines changed)
  65. src/Connection/Connection.py (14 lines changed)
  66. src/Connection/ConnectionServer.py (10 lines changed)
  67. src/Connection/__init__.py (4 lines changed)
  68. src/Content/ContentDb.py (6 lines changed)
  69. src/Content/ContentDbDict.py (20 lines changed)
  70. src/Content/ContentManager.py (67 lines changed)
  71. src/Content/__init__.py (2 lines changed)
  72. src/Crypt/CryptHash.py (8 lines changed)
  73. src/Crypt/CryptRsa.py (2 lines changed)
  74. src/Db/Db.py (25 lines changed)
  75. src/Db/DbQuery.py (6 lines changed)
  76. src/Db/__init__.py (6 lines changed)
  77. src/Debug/Debug.py (18 lines changed)
  78. src/Debug/DebugHook.py (19 lines changed)
  79. src/Debug/DebugMedia.py (29 lines changed)
  80. src/Debug/__init__.py (1 line changed)
  81. src/File/FileRequest.py (16 lines changed)
  82. src/File/FileServer.py (18 lines changed)
  83. src/File/__init__.py (4 lines changed)
  84. src/Peer/Peer.py (19 lines changed)
  85. src/Peer/PeerHashfield.py (4 lines changed)
  86. src/Peer/PeerPortchecker.py (17 lines changed)
  87. src/Peer/__init__.py (4 lines changed)
  88. src/Plugin/PluginManager.py (23 lines changed)
  89. src/Site/Site.py (48 lines changed)
  90. src/Site/SiteAnnouncer.py (21 lines changed)
  91. src/Site/SiteManager.py (16 lines changed)
  92. src/Site/SiteStorage.py (44 lines changed)
  93. src/Site/__init__.py (6 lines changed)
  94. src/Test/BenchmarkSsl.py (14 lines changed)
  95. src/Test/Spy.py (8 lines changed)
  96. src/Test/TestContent.py (20 lines changed)
  97. src/Test/TestContentUser.py (2 lines changed)
  98. src/Test/TestDb.py (12 lines changed)
  99. src/Test/TestDiff.py (14 lines changed)
  100. src/Test/TestFileRequest.py (16 lines changed)

Some files were not shown because too many files have changed in this diff.

@@ -4,7 +4,7 @@ import gevent
from Plugin import PluginManager
from Config import config
import BroadcastServer
from . import BroadcastServer
@PluginManager.registerTo("SiteAnnouncer")
@@ -42,7 +42,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
if force: # Probably new site added, clean cache
self.known_peers = {}
for peer_id, known_peer in self.known_peers.items():
for peer_id, known_peer in list(self.known_peers.items()):
if time.time() - known_peer["found"] > 20 * 60:
del(self.known_peers[peer_id])
self.log.debug("Timeout, removing from known_peers: %s" % peer_id)
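This is the classic Python 3 dict-view pitfall: items() now returns a live view, and deleting keys while iterating over it raises RuntimeError, so the loop must iterate over a list() snapshot. A minimal sketch with made-up peers (not ZeroNet code):

    known_peers = {"peer_a": {"found": 0}, "peer_b": {"found": 0}}
    for peer_id, known_peer in list(known_peers.items()):  # snapshot of the items
        del known_peers[peer_id]  # safe; deleting inside a raw .items() loop raises RuntimeError
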
@@ -78,7 +78,7 @@ class LocalAnnouncer(BroadcastServer.BroadcastServer):
def actionSiteListRequest(self, sender, params):
back = []
sites = self.server.sites.values()
sites = list(self.server.sites.values())
# Split adresses to group of 100 to avoid UDP size limit
site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)]

@@ -85,10 +85,10 @@ class TestAnnounce:
def testPeerDiscover(self, announcer, announcer_remote, site):
assert announcer.server.peer_id != announcer_remote.server.peer_id
assert len(announcer.server.sites.values()[0].peers) == 0
assert len(list(announcer.server.sites.values())[0].peers) == 0
announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert len(announcer.server.sites.values()[0].peers) == 1
assert len(list(announcer.server.sites.values())[0].peers) == 1
def testRecentPeerList(self, announcer, announcer_remote, site):
assert len(site.peers_recent) == 0
@@ -101,13 +101,13 @@ class TestAnnounce:
assert len(site.peers) == 1
# It should update peer without siteListResponse
last_time_found = site.peers.values()[0].time_found
last_time_found = list(site.peers.values())[0].time_found
site.peers_recent.clear()
with Spy.Spy(announcer, "handleMessage") as responses:
announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port)
time.sleep(0.1)
assert [response[1]["cmd"] for response in responses] == ["discoverResponse"]
assert len(site.peers_recent) == 1
assert site.peers.values()[0].time_found > last_time_found
assert list(site.peers.values())[0].time_found > last_time_found

@@ -1 +1 @@
import AnnounceLocalPlugin
from . import AnnounceLocalPlugin
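The one-line __init__.py changes throughout this commit are all the same fix: Python 3 removed implicit relative imports (PEP 328), so a package importing its own submodule needs the explicit from . form. A sketch of the pattern, assuming the plugin package layout used in this repo:

    # plugins/AnnounceLocal/__init__.py
    from . import AnnounceLocalPlugin  # explicit relative import, works on Python 3
    # "import AnnounceLocalPlugin" was Python 2's implicit relative form;
    # on Python 3 it searches sys.path only and fails with ModuleNotFoundError
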

@@ -75,7 +75,7 @@ class TrackerStorage(object):
def getWorkingTrackers(self, type="shared"):
trackers = {
key: tracker for key, tracker in self.getTrackers(type).iteritems()
key: tracker for key, tracker in self.getTrackers(type).items()
if tracker["time_success"] > time.time() - 60 * 60
}
return trackers
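iteritems()/itervalues()/iterkeys() no longer exist in Python 3; items()/values()/keys() return lazy view objects, so for read-only loops like this one the rename is a drop-in replacement with no extra copy:

    trackers = {"zero://tracker.example:15441": {"time_success": 0}}  # hypothetical entry
    for key, tracker in trackers.items():  # view object; nothing is materialized
        print(key, tracker["time_success"])
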
@@ -95,7 +95,7 @@ class TrackerStorage(object):
trackers = self.getTrackers()
self.log.debug("Loaded %s shared trackers" % len(trackers))
for address, tracker in trackers.items():
for address, tracker in list(trackers.items()):
tracker["num_error"] = 0
if not address.startswith("zero://"):
del trackers[address]
@@ -144,7 +144,7 @@ class SiteAnnouncerPlugin(object):
tracker_storage.time_discover = time.time()
gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
trackers = super(SiteAnnouncerPlugin, self).getTrackers()
shared_trackers = tracker_storage.getTrackers("shared").keys()
shared_trackers = list(tracker_storage.getTrackers("shared").keys())
if shared_trackers:
return trackers + shared_trackers
else:
@@ -164,7 +164,7 @@ class SiteAnnouncerPlugin(object):
@PluginManager.registerTo("FileRequest")
class FileRequestPlugin(object):
def actionGetTrackers(self, params):
shared_trackers = tracker_storage.getWorkingTrackers("shared").keys()
shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys())
self.response({"trackers": shared_trackers})

@@ -12,7 +12,6 @@ class TestAnnounceShare:
open("%s/trackers.json" % config.data_dir, "w").write("{}")
tracker_storage = AnnounceSharePlugin.tracker_storage
tracker_storage.load()
print tracker_storage.file_path, config.data_dir
peer = Peer(file_server.ip, 1544, connection_server=file_server)
assert peer.request("getTrackers")["trackers"] == []

@@ -1 +1 @@
import AnnounceSharePlugin
from . import AnnounceSharePlugin

@@ -119,7 +119,7 @@ class SiteAnnouncerPlugin(object):
onion = self.site.connection_server.tor_manager.getOnion(site.address)
publickey = self.site.connection_server.tor_manager.getPublickey(onion)
if publickey not in request["onion_signs"]:
sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion))
sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion))
request["onion_signs"][publickey] = sign
res = tracker_peer.request("announce", request)
if not res or "onion_sign_this" in res:

@@ -1 +1 @@
import AnnounceZeroPlugin
from . import AnnounceZeroPlugin

@@ -4,7 +4,7 @@ import array
def packPiecefield(data):
res = []
if not data:
return array.array("H", "")
return array.array("H", b"")
if data[0] == "0":
res.append(0)
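Part of the bytes/str split: on Python 3, array.array may only be initialized from a bytes-like object, so the empty-string initializer must become b"". A quick standard-library check:

    import array
    assert array.array("H", b"").tobytes() == b""  # empty bytes initializer is fine
    # array.array("H", "") raises TypeError on Python 3: str initializers are rejected
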
@@ -48,7 +48,7 @@ class BigfilePiecefield(object):
__slots__ = ["data"]
def __init__(self):
self.data = ""
self.data = b""
def fromstring(self, s):
self.data = s
@@ -71,7 +71,7 @@ class BigfilePiecefield(object):
def __setitem__(self, key, value):
data = self.data
if len(data) < key:
data = data.ljust(key+1, "0")
data = data.ljust(key + 1, "0")
data = data[:key] + str(int(value)) + data[key + 1:]
self.data = data
@@ -80,7 +80,7 @@ class BigfilePiecefieldPacked(object):
__slots__ = ["data"]
def __init__(self):
self.data = ""
self.data = b""
def fromstring(self, data):
self.data = packPiecefield(data).tostring()
@@ -103,7 +103,7 @@ class BigfilePiecefieldPacked(object):
def __setitem__(self, key, value):
data = self.tostring()
if len(data) < key:
data = data.ljust(key+1, "0")
data = data.ljust(key + 1, "0")
data = data[:key] + str(int(value)) + data[key + 1:]
self.fromstring(data)
@@ -116,7 +116,7 @@ if __name__ == "__main__":
meminfo = psutil.Process(os.getpid()).memory_info
for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
print "-- Testing storage: %s --" % storage
print("-- Testing storage: %s --" % storage)
m = meminfo()[0]
s = time.time()
piecefields = {}
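The rest of this benchmark block is the mechanical print-statement conversion (PEP 3105): print is a function on Python 3, so each %-formatted string just gets wrapped in a call:

    storage_name = "BigfilePiecefield"  # stand-in value
    print("-- Testing storage: %s --" % storage_name)
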
@@ -125,34 +125,34 @@ if __name__ == "__main__":
piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:])
piecefields[i] = piecefield
print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
val = piecefield[1000]
print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
piecefield[1000] = True
print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)
print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
packed = piecefield.pack()
print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))
print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)))
m = meminfo()[0]
s = time.time()
for piecefield in piecefields.values():
for piecefield in list(piecefields.values()):
piecefield.unpack(packed)
print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))
print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)))
piecefields = {}

@@ -5,7 +5,6 @@ import shutil
import collections
import math
import msgpack
import gevent
import gevent.lock
@@ -15,7 +14,7 @@ from Crypt import CryptHash
from lib import merkletools
from util import helper
import util
from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
# We can only import plugin host clases after the plugins are loaded
@@ -61,7 +60,7 @@ class UiRequestPlugin(object):
)
if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
hash = piecemap_info["sha512_pieces"][0].encode("hex")
hash = piecemap_info["sha512_pieces"][0].hex()
hash_id = site.content_manager.hashfield.getHashId(hash)
site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
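The hex codec went away with the Python 3 bytes/str split; for a bytes digest the replacement is the bytes.hex() method (binascii.hexlify is the older equivalent). Sketch:

    digest = bytes.fromhex("a73abad9")  # first bytes of a piece hash, for illustration
    assert digest.hex() == "a73abad9"   # replaces digest.encode("hex")
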
@@ -178,7 +177,7 @@ class UiWebsocketPlugin(object):
self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True)
try:
self.site.storage.delete(piecemap_inner_path)
except Exception, err:
except Exception as err:
self.log.error("File %s delete error: %s" % (piecemap_inner_path, err))
return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path)
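except Exception, err: is a SyntaxError on Python 3; PEP 3110 made the as form the only accepted spelling (it already worked on Python 2.6+), which is why this substitution repeats across the whole commit:

    try:
        raise ValueError("boom")
    except Exception as err:  # the only valid binding syntax on Python 3
        print("error: %s" % err)
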
@@ -324,7 +323,7 @@ class ContentManagerPlugin(object):
def verifyPiece(self, inner_path, pos, piece):
piecemap = self.getPiecemap(inner_path)
piece_i = pos / piecemap["piece_size"]
piece_i = int(pos / piecemap["piece_size"])
if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
raise VerifyError("Invalid hash")
return True
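The int() wrappers added around these offset calculations exist because / is true division on Python 3 (PEP 238) and would produce a float index. Floor division with // is the more idiomatic spelling and gives the same result without the float round-trip:

    pos, piece_size = 5 * 1024 * 1024, 1024 * 1024
    assert pos / piece_size == 5.0     # true division: float on Python 3
    assert int(pos / piece_size) == 5  # the form this commit uses
    assert pos // piece_size == 5      # equivalent here, stays an int throughout
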
@@ -345,7 +344,7 @@ class ContentManagerPlugin(object):
file_info = self.getFileInfo(inner_path)
# Mark piece downloaded
piece_i = pos_from / file_info["piece_size"]
piece_i = int(pos_from / file_info["piece_size"])
self.site.storage.piecefields[file_info["sha512"]][piece_i] = True
# Only add to site size on first request
@@ -368,7 +367,7 @@ class ContentManagerPlugin(object):
del self.site.storage.piecefields[sha512]
# Also remove other pieces of the file from download queue
for key in self.site.bad_files.keys():
for key in list(self.site.bad_files.keys()):
if key.startswith(inner_path + "|"):
del self.site.bad_files[key]
self.site.worker_manager.removeSolvedFileTasks()
@@ -381,9 +380,9 @@ class SiteStoragePlugin(object):
super(SiteStoragePlugin, self).__init__(*args, **kwargs)
self.piecefields = collections.defaultdict(BigfilePiecefield)
if "piecefields" in self.site.settings.get("cache", {}):
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems():
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
if piecefield_packed:
self.piecefields[sha512].unpack(piecefield_packed.decode("base64"))
self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
self.site.settings["cache"]["piecefields"] = {}
def createSparseFile(self, inner_path, size, sha512=None):
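The "base64" string codec is gone on Python 3, so piecefield_packed.decode("base64") becomes base64.b64decode(...), which operates on bytes; this pairs with the b64encode change in getSettingsCache further down. Note this assumes an import base64 was added at the top of BigfilePlugin.py, which is not visible in this excerpt. Round-trip sketch:

    import base64
    packed = b"\x01\x02\x03"
    encoded = base64.b64encode(packed).decode("utf8")  # ASCII str, safe to store in JSON settings
    assert base64.b64decode(encoded) == packed
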
@@ -486,7 +485,7 @@ class BigFile(object):
requests = []
# Request all required blocks
while 1:
piece_i = pos / self.piece_size
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= read_until:
break
pos_from = piece_i * self.piece_size
@@ -503,7 +502,7 @@ class BigFile(object):
prebuffer_until = min(self.size, read_until + self.prebuffer)
priority = 3
while 1:
piece_i = pos / self.piece_size
piece_i = int(pos / self.piece_size)
if piece_i * self.piece_size >= prebuffer_until:
break
pos_from = piece_i * self.piece_size
@@ -565,7 +564,7 @@ class WorkerManagerPlugin(object):
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
task["piece_i"] = pos_from / file_info["piece_size"]
task["piece_i"] = int(pos_from / file_info["piece_size"])
task["sha512"] = file_info["sha512"]
else:
if inner_path in self.site.bad_files:
@@ -601,10 +600,10 @@
class FileRequestPlugin(object):
def isReadable(self, site, inner_path, file, pos):
# Peek into file
if file.read(10) == "\0" * 10:
if file.read(10) == b"\0" * 10:
# Looks empty, but makes sures we don't have that piece
file_info = site.content_manager.getFileInfo(inner_path)
piece_i = pos / file_info["piece_size"]
piece_i = int(pos / file_info["piece_size"])
if not site.storage.piecefields[file_info["sha512"]][piece_i]:
return False
# Seek back to position we want to read
@@ -622,7 +621,7 @@ class FileRequestPlugin(object):
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()}
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
self.response({"piecefields_packed": piecefields_packed})
def actionSetPiecefields(self, params):
@@ -638,7 +637,7 @@ class FileRequestPlugin(object):
peer.connect(self.connection)
peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
for sha512, piecefield_packed in params["piecefields_packed"].iteritems():
for sha512, piecefield_packed in params["piecefields_packed"].items():
peer.piecefields[sha512].unpack(piecefield_packed)
site.settings["has_bigfile"] = True
@@ -673,7 +672,7 @@ class PeerPlugin(object):
self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
try:
for sha512, piecefield_packed in res["piecefields_packed"].iteritems():
for sha512, piecefield_packed in res["piecefields_packed"].items():
self.piecefields[sha512].unpack(piecefield_packed)
except Exception as err:
self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
@@ -720,7 +719,7 @@ class SitePlugin(object):
def getSettingsCache(self):
back = super(SitePlugin, self).getSettingsCache()
if self.storage.piecefields:
back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()}
back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
return back
def needFile(self, inner_path, *args, **kwargs):

@@ -1,5 +1,5 @@
import time
from cStringIO import StringIO
import io
import pytest
import msgpack
@@ -40,7 +40,7 @@ class TestBigfile:
piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"]
assert len(piecemap["sha512_pieces"]) == 10
assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1]
assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
assert piecemap["sha512_pieces"][0].hex() == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3"
def testVerifyPiece(self, site):
inner_path = self.createBigfile(site)
@@ -48,7 +48,7 @@ class TestBigfile:
# Verify all 10 piece
f = site.storage.open(inner_path, "rb")
for i in range(10):
piece = StringIO(f.read(1024 * 1024))
piece = io.BytesIO(f.read(1024 * 1024))
piece.seek(0)
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
f.close()
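cStringIO no longer exists; the io module is the Python 3 home for in-memory files, with io.BytesIO for binary data and io.StringIO for text. Since the pieces come from a file opened with "rb", BytesIO is the right buffer type:

    import io
    piece = io.BytesIO(b"Test524" + b"\0" * 9)  # binary in-memory file
    assert piece.read(7) == b"Test524"
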
@@ -57,7 +57,7 @@ class TestBigfile:
with pytest.raises(VerifyError) as err:
i = 1
f = site.storage.open(inner_path, "rb")
piece = StringIO(f.read(1024 * 1024))
piece = io.BytesIO(f.read(1024 * 1024))
f.close()
site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece)
assert "Invalid hash" in str(err)
@@ -70,19 +70,19 @@ class TestBigfile:
# Write to file beginning
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024)
f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024)
time_write_start = time.time() - s
# Write to file end
s = time.time()
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024)
f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024)
time_write_end = time.time() - s
# Verify writes
f = site.storage.open(inner_path)
assert f.read(10) == "hellostart"
assert f.read(10) == b"hellostart"
f.seek(99 * 1024 * 1024)
assert f.read(8) == "helloend"
assert f.read(8) == b"helloend"
f.close()
site.storage.delete(inner_path)
@@ -105,7 +105,7 @@ class TestBigfile:
buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024))
assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size
assert buff.getvalue().startswith("Test524") # Correct data
assert buff.getvalue().startswith(b"Test524") # Correct data
buff.seek(0)
assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash
@@ -147,12 +147,12 @@ class TestBigfile:
# Verify 0. block not downloaded
f = site_temp.storage.open(inner_path)
assert f.read(10) == "\0" * 10
assert f.read(10) == b"\0" * 10
# Verify 5. and 10. block downloaded
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == "943---T"
assert f.read(7) == b"943---T"
# Verify hashfield
assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack
@@ -178,14 +178,14 @@ class TestBigfile:
with site_temp.storage.openBigfile(inner_path) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
f.seek(9 * 1024 * 1024)
assert f.read(7) == "943---T"
assert f.read(7) == b"943---T"
assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces
assert set(site_temp.content_manager.hashfield) == set([18343, 30970])
assert set(site_temp.content_manager.hashfield) == set([18343, 43727])
assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001"
assert f.sha512 in site_temp.getSettingsCache()["piecefields"]
@@ -193,7 +193,7 @@ class TestBigfile:
# Test requesting already downloaded
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
assert len(requests) == 0
@@ -201,9 +201,9 @@ class TestBigfile:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024) # We already have this block
data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. block
assert data.startswith("Test524")
assert data.endswith("Test838-")
assert "\0" not in data # No null bytes allowed
assert data.startswith(b"Test524")
assert data.endswith(b"Test838-")
assert b"\0" not in data # No null bytes allowed
assert len(requests) == 2 # Two block download
@@ -258,11 +258,11 @@ class TestBigfile:
# Download second block
with site_temp.storage.openBigfile(inner_path) as f:
f.seek(1024 * 1024)
assert f.read(1024)[0] != "\0"
assert f.read(1024)[0:1] != b"\0"
# Make sure first block not download
with site_temp.storage.open(inner_path) as f:
assert f.read(1024)[0] == "\0"
assert f.read(1024)[0:1] == b"\0"
peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
@@ -284,8 +284,8 @@ class TestBigfile:
s = time.time()
for i in range(25000):
site.addPeer(file_server.ip, i)
print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB
print site.peers.values()[0].piecefields
print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB
print(list(site.peers.values())[0].piecefields)
def testUpdatePiecefield(self, file_server, site, site_temp):
inner_path = self.createBigfile(site)
@@ -390,16 +390,16 @@ class TestBigfile:
size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"]
with site_temp.storage.openBigfile(inner_path) as f:
assert "\0" not in f.read(1024)
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
with site_temp.storage.openBigfile(inner_path) as f:
# Don't count twice
assert "\0" not in f.read(1024)
assert b"\0" not in f.read(1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
# Add second block
assert "\0" not in f.read(1024 * 1024)
assert b"\0" not in f.read(1024 * 1024)
assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile
def testPrebuffer(self, file_server, site, site_temp):
@@ -423,7 +423,7 @@ class TestBigfile:
with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f:
with Spy.Spy(FileRequest, "route") as requests:
f.seek(5 * 1024 * 1024)
assert f.read(7) == "Test524"
assert f.read(7) == b"Test524"
# assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2
@@ -434,7 +434,7 @@ class TestBigfile:
# No prebuffer beyond end of the file
f.seek(9 * 1024 * 1024)
assert "\0" not in f.read(7)
assert b"\0" not in f.read(7)
assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0

@@ -1,2 +1,2 @@
import BigfilePlugin
from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked
from . import BigfilePlugin
from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked

@@ -29,7 +29,7 @@ class ChartCollector(object):
sites = file_server.sites
if not sites:
return collectors
content_db = sites.values()[0].content_manager.contents.db
content_db = list(sites.values())[0].content_manager.contents.db
# Connection stats
collectors["connection"] = lambda: len(file_server.connections)
@@ -67,8 +67,8 @@ class ChartCollector(object):
collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()])
# Peers
collectors["peer"] = lambda (peers): len(peers)
collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer])
collectors["peer"] = lambda peers: len(peers)
collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer])
# Size
collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()])
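lambda (peers): ... used Python 2 tuple-parameter unpacking, which PEP 3113 removed; dropping the parentheses yields an ordinary one-argument lambda with identical behaviour. A check with hypothetical peer keys:

    peers = {"1.2.3.4:15441", "abcdefgh.onion:15441"}
    count_onion = lambda peers: len([True for peer in peers if ".onion" in peer])
    assert count_onion(peers) == 1
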
@@ -81,21 +81,21 @@ class ChartCollector(object):
site_collectors = {}
# Size
site_collectors["site_size"] = lambda(site): site.settings.get("size", 0)
site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0)
site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0)
site_collectors["site_content"] = lambda(site): len(site.content_manager.contents)
site_collectors["site_size"] = lambda site: site.settings.get("size", 0)
site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0)
site_collectors["site_optional_downloaded"] = lambda site: site.settings.get("optional_downloaded", 0)
site_collectors["site_content"] = lambda site: len(site.content_manager.contents)
# Data transfer
site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0)
site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0)
site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0)
site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0)
# Peers
site_collectors["site_peer"] = lambda(site): len(site.peers)
site_collectors["site_peer_onion"] = lambda(site): len(
[True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")]
site_collectors["site_peer"] = lambda site: len(site.peers)
site_collectors["site_peer_onion"] = lambda site: len(
[True for peer in site.peers.values() if peer.ip.endswith(".onion")]
)
site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection])
site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection])
return site_collectors
@@ -109,7 +109,7 @@ class ChartCollector(object):
if site is None:
peers = self.getUniquePeers()
datas = {}
for key, collector in collectors.iteritems():
for key, collector in collectors.items():
try:
if site:
value = collector(site)
@@ -138,7 +138,7 @@ class ChartCollector(object):
s = time.time()
datas = self.collectDatas(collectors, last_values["global"])
values = []
for key, value in datas.iteritems():
for key, value in datas.items():
values.append((self.db.getTypeId(key), value, now))
self.log.debug("Global collectors done in %.3fs" % (time.time() - s))
@@ -154,9 +154,9 @@ class ChartCollector(object):
now = int(time.time())
s = time.time()
values = []
for address, site in sites.iteritems():
for address, site in sites.items():
site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site)
for key, value in site_datas.iteritems():
for key, value in site_datas.items():
values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now))
time.sleep(0.000001)
self.log.debug("Site collections done in %.3fs" % (time.time() - s))

@@ -6,8 +6,8 @@ import gevent
from Config import config
from util import helper
from Plugin import PluginManager
from ChartDb import ChartDb
from ChartCollector import ChartCollector
from .ChartDb import ChartDb
from .ChartCollector import ChartCollector
if "db" not in locals().keys(): # Share on reloads
db = ChartDb()
@@ -39,7 +39,7 @@ class UiWebsocketPlugin(object):
if not query.strip().upper().startswith("SELECT"):
raise Exception("Only SELECT query supported")
res = db.execute(query, params)
except Exception, err: # Response the error to client
except Exception as err: # Response the error to client
self.log.error("ChartDbQuery error: %s" % err)
return {"error": str(err)}
# Convert result to dict

@@ -1 +1 @@
import ChartPlugin
from . import ChartPlugin

@@ -1,13 +1,13 @@
import time
import re
import cgi
import html
import hashlib
from Plugin import PluginManager
from Translate import Translate
from Config import config
from ContentFilterStorage import ContentFilterStorage
from .ContentFilterStorage import ContentFilterStorage
if "_" not in locals():
@@ -39,8 +39,8 @@ class UiWebsocketPlugin(object):
else:
self.cmd(
"confirm",
[_["Hide all content from <b>%s</b>?"] % cgi.escape(cert_user_id), _["Mute"]],
lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason)
[_["Hide all content from <b>%s</b>?"] % html.escape(cert_user_id), _["Mute"]],
lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason)
)
def cbMuteRemove(self, to, auth_address):
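cgi.escape() was deprecated in favour of html.escape() (and later removed in Python 3.8). The replacements here are near drop-in, with one behavioural difference worth knowing: html.escape() also escapes quotes by default, which is if anything safer for strings interpolated into markup:

    import html
    assert html.escape('<b>"cert"</b>') == "&lt;b&gt;&quot;cert&quot;&lt;/b&gt;"
    # cgi.escape left quotes alone unless quote=True was passed explicitly
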
@@ -55,8 +55,8 @@ class UiWebsocketPlugin(object):
else:
self.cmd(
"confirm",
[_["Unmute <b>%s</b>?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
lambda (res): self.cbMuteRemove(to, auth_address)
[_["Unmute <b>%s</b>?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]],
lambda res: self.cbMuteRemove(to, auth_address)
)
def actionMuteList(self, to):
@@ -101,13 +101,13 @@ class UiWebsocketPlugin(object):
else:
content = site.storage.loadJson(inner_path)
title = _["New shared global content filter: <b>%s</b> (%s sites, %s users)"] % (
cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {}))
)
self.cmd(
"confirm",
[title, "Add"],
lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description)
lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description)
)
def cbFilterIncludeAdd(self, to, res, address, inner_path, description):
@@ -189,7 +189,7 @@ class UiRequestPlugin(object):
address = self.server.site_manager.resolveDomain(address)
if address:
address_sha256 = "0x" + hashlib.sha256(address).hexdigest()
address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
else:
address_sha256 = None
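hashlib digests operate on bytes, not str, on Python 3, so the address has to be encoded before hashing; passing a str raises TypeError. Sketch with a made-up address:

    import hashlib
    address = "1JUDmCT4UCSdnPsJAHBoXNkDS61Y31Ue52"  # hypothetical site address
    address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
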

@@ -62,7 +62,7 @@ class ContentFilterStorage(object):
)
continue
for key, val in content.iteritems():
for key, val in content.items():
if type(val) is not dict:
continue

@@ -1 +1 @@
import ContentFilterPlugin
from . import ContentFilterPlugin

@@ -1,5 +1,5 @@
import re
import cgi
import html
import copy
from Plugin import PluginManager
@@ -78,8 +78,8 @@ class UiWebsocketPlugin(object):
self.cmd(
"confirm",
[_["This site requests <b>read</b> permission to: <b>%s</b>"] % cgi.escape(site_name), button_title],
lambda (res): self.cbCorsPermission(to, address)
[_["This site requests <b>read</b> permission to: <b>%s</b>"] % html.escape(site_name), button_title],
lambda res: self.cbCorsPermission(to, address)
)
def cbCorsPermission(self, to, address):

@@ -1 +1 @@
import CorsPlugin
from . import CorsPlugin

@@ -43,11 +43,11 @@ def getEcc(privatekey=None):
def toOpensslPrivatekey(privatekey):
privatekey_bin = btctools.encode_privkey(privatekey, "bin")
return '\x02\xca\x00\x20' + privatekey_bin
return b'\x02\xca\x00\x20' + privatekey_bin
def toOpensslPublickey(publickey):
publickey_bin = btctools.encode_pubkey(publickey, "bin")
publickey_bin = publickey_bin[1:]
publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:]
return publickey_openssl

@@ -3,9 +3,9 @@ import os
from Plugin import PluginManager
from Crypt import CryptBitcoin
from lib.pybitcointools import bitcoin as btctools
import lib.pybitcointools as btctools
import CryptMessage
from . import CryptMessage
@PluginManager.registerTo("UiWebsocket")

@@ -1 +1 @@
import CryptMessagePlugin
from . import CryptMessagePlugin

@@ -48,7 +48,7 @@ class UiRequestPlugin(object):
if ".zip/" in path or ".tar.gz/" in path:
file_obj = None
path_parts = self.parsePath(path)
file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8"))
file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path)
archive_path, path_within = match.groups()
if archive_path not in archive_cache:

@@ -1 +1 @@
import FilePackPlugin
from . import FilePackPlugin

@@ -66,7 +66,7 @@ class UiWebsocketPlugin(object):
self.cmd(
"confirm",
[_["Add <b>%s</b> new site?"] % len(addresses), "Add"],
lambda (res): self.cbMergerSiteAdd(to, addresses)
lambda res: self.cbMergerSiteAdd(to, addresses)
)
self.response(to, "ok")
@@ -102,7 +102,7 @@ class UiWebsocketPlugin(object):
ret = {}
if not merger_types:
return self.response(to, {"error": "Not a merger site"})
for address, merged_type in merged_db.iteritems():
for address, merged_type in merged_db.items():
if merged_type not in merger_types:
continue # Site not for us
if query_site_info:
@@ -215,7 +215,7 @@ class UiWebsocketPlugin(object):
if not re.match("^[A-Za-z0-9-]+$", merger_type):
raise Exception("Invalid merger_type: %s" % merger_type)
merged_sites = []
for address, merged_type in merged_db.iteritems():
for address, merged_type in merged_db.items():
if merged_type != merger_type:
continue
site = self.server.sites.get(address)
@@ -253,18 +253,18 @@ class SiteStoragePlugin(object):
# Not a merger site, that's all
if not merger_types:
raise StopIteration
return
merged_sites = [
site_manager.sites[address]
for address, merged_type in merged_db.iteritems()
for address, merged_type in merged_db.items()
if merged_type in merger_types
]
found = 0
for merged_site in merged_sites:
self.log.debug("Loading merged site: %s" % merged_site)
merged_type = merged_db[merged_site.address]
for content_inner_path, content in merged_site.content_manager.contents.iteritems():
for content_inner_path, content in merged_site.content_manager.contents.items():
# content.json file itself
if merged_site.storage.isFile(content_inner_path): # Missing content.json file
merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)
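The raise StopIteration to return swap near the top of this hunk is PEP 479: since Python 3.7 a StopIteration escaping a generator body is converted into RuntimeError, so a bare return is the supported way to end iteration early. Minimal sketch with a hypothetical generator:

    def list_merged(merger_types):
        if not merger_types:
            return  # ends the generator cleanly; raise StopIteration would crash on 3.7+
        for merged_type in merger_types:
            yield merged_type

    assert list(list_merged([])) == []
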
@@ -273,7 +273,7 @@ class SiteStoragePlugin(object):
merged_site.log.error("[MISSING] %s" % content_inner_path)
# Data files in content.json
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys():
for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()):
if not file_relative_path.endswith(".json"):
continue # We only interesed in json files
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
@@ -285,7 +285,7 @@ class SiteStoragePlugin(object):
merged_site.log.error("[MISSING] %s" % file_inner_path)
found += 1
if found % 100 == 0:
time.sleep(0.000001) # Context switch to avoid UI block
time.sleep(0.001) # Context switch to avoid UI block
# Also notice merger sites on a merged site file change
def onUpdated(self, inner_path, file=None):
@@ -339,11 +339,11 @@ class SiteManagerPlugin(object):
site_manager = self
if not self.sites:
return
for site in self.sites.itervalues():
for site in self.sites.values():
# Update merged sites
try:
merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type")
except Exception, err:
except Exception as err:
self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err)))
continue
if merged_type:
@@ -368,7 +368,7 @@ class SiteManagerPlugin(object):
# Update merged to merger
if merged_type:
for merger_site in self.sites.itervalues():
for merger_site in self.sites.values():
if "Merger:" + merged_type in merger_site.settings["permissions"]:
if site.address not in merged_to_merger:
merged_to_merger[site.address] = []

@@ -1 +1 @@
import MergerSitePlugin
from . import MergerSitePlugin

@@ -37,7 +37,7 @@ class UiWebsocketPlugin(object):
total_s = time.time()
num_sites = 0
for address, site_data in self.user.sites.items():
for address, site_data in list(self.user.sites.items()):
feeds = site_data.get("follow")
if not feeds:
continue
@@ -45,7 +45,7 @@ class UiWebsocketPlugin(object):
self.log.debug("Invalid feed for site %s" % address)
continue
num_sites += 1
for name, query_set in feeds.iteritems():
for name, query_set in feeds.items():
site = SiteManager.site_manager.get(address)
if not site or not site.storage.has_db:
continue
@@ -78,7 +78,7 @@ class UiWebsocketPlugin(object):
for row in res:
row = dict(row)
if not isinstance(row["date_added"], (int, long, float, complex)):
if not isinstance(row["date_added"], (int, float, complex)):
self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"]))
continue
if row["date_added"] > 1000000000000: # Formatted as millseconds
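long is dropped from the isinstance check because PEP 237 unified int and long: Python 3's int is arbitrary-precision, so (int, float, complex) covers every numeric date_added value:

    date_added = 10 ** 18  # would have been a long on Python 2
    assert isinstance(date_added, (int, float, complex))
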
@@ -116,7 +116,7 @@ class UiWebsocketPlugin(object):
search_text, filters = self.parseSearch(search)
for address, site in SiteManager.site_manager.list().iteritems():
for address, site in SiteManager.site_manager.list().items():
if not site.storage.has_db:
continue
@@ -137,7 +137,7 @@ class UiWebsocketPlugin(object):
num_sites += 1
for name, query in feeds.iteritems():
for name, query in feeds.items():
s = time.time()
try:
db_query = DbQuery(query)
@@ -162,7 +162,7 @@ class UiWebsocketPlugin(object):
db_query.parts["LIMIT"] = str(limit)
res = site.storage.query(str(db_query), params)
except Exception, err:
except Exception as err:
self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
stats.