diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py
new file mode 100644
index 00000000..06181b89
--- /dev/null
+++ b/src/Test/BenchmarkSsl.py
@@ -0,0 +1,162 @@
+#!/usr/bin/python2
+from gevent import monkey
+monkey.patch_all()
+import os
+import time
+import sys
+import socket
+import ssl
+sys.path.append(os.path.abspath("..")) # Imports relative to src dir
+
+import io as StringIO
+import gevent
+
+from gevent.server import StreamServer
+from gevent.pool import Pool
+from Config import config
+config.parse()
+from util import SslPatch
+
+# Server
+socks = []
+data = os.urandom(1024 * 100)
+data += b"\n"  # must be bytes: os.urandom() returns bytes on Python 3
+
+
+def handle(sock_raw, addr):
+    # Echo-style benchmark handler: serves `data` blobs, upgrades to SSL on request.
+    socks.append(sock_raw)
+    sock = sock_raw
+    # sock = ctx.wrap_socket(sock, server_side=True)
+    # if sock_raw.recv( 1, gevent.socket.MSG_PEEK ) == "\x16":
+    #    sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
+    #                                  certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
+    # fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
+    try:
+        while True:
+            line = sock.recv(16 * 1024)
+            if not line:
+                break
+            if line == b"bye\n":  # socket recv() yields bytes on Python 3, compare bytes
+                break
+            elif line == b"gotssl\n":
+                sock.sendall(b"yes\n")
+                sock = gevent.ssl.wrap_socket(
+                    sock_raw, server_side=True, keyfile='../../data/key-rsa.pem', certfile='../../data/cert-rsa.pem',
+                    ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
+                )
+            else:
+                sock.sendall(data)
+    except Exception as err:
+        print(err)
+    try:  # best-effort close: peer may already be gone
+        sock.shutdown(gevent.socket.SHUT_WR)
+        sock.close()
+    except Exception:
+        pass
+    socks.remove(sock_raw)
+
+pool = Pool(1000)  # do not accept more than 1000 connections
+server = StreamServer(('127.0.0.1', 1234), handle)
+server.start()
+
+
+# Client
+
+
+total_num = 0
+total_bytes = 0
+clipher = None
+ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
+ "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"
+
+# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+
+
+def getData():
+    # Benchmark client: requests SSL upgrade, then pulls 20 responses and tallies totals.
+    global total_num, total_bytes, clipher
+    data = None
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    # sock = socket.ssl(s)
+    # sock = ssl.wrap_socket(sock)
+    sock.connect(("127.0.0.1", 1234))
+    # sock.do_handshake()
+    # clipher = sock.cipher()
+    sock.send(b"gotssl\n")  # sockets carry bytes on Python 3
+    if sock.recv(128) == b"yes\n":
+        sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
+        sock.do_handshake()
+        clipher = sock.cipher()
+
+    for req in range(20):
+        sock.sendall(b"req\n")
+        buff = StringIO.BytesIO()  # recv() returns bytes, so buffer must be BytesIO
+        data = sock.recv(16 * 1024)
+        buff.write(data)
+        if not data:
+            break
+        while not data.endswith(b"\n"):  # responses are newline-terminated
+            data = sock.recv(16 * 1024)
+            if not data:
+                break
+            buff.write(data)
+        total_num += 1
+        total_bytes += buff.tell()
+        if not data:
+            print("No data")
+
+    sock.shutdown(gevent.socket.SHUT_WR)
+    sock.close()
+
+s = time.time()
+
+
+def info():
+ import psutil
+ import os
+ process = psutil.Process(os.getpid())
+ if "memory_info" in dir(process):
+ memory_info = process.memory_info
+ else:
+ memory_info = process.get_memory_info
+ while 1:
+ print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ')
+ print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20))
+ time.sleep(1)
+
+gevent.spawn(info)
+
+for test in range(1):
+ clients = []
+ for i in range(500): # Thread
+ clients.append(gevent.spawn(getData))
+ gevent.joinall(clients)
+
+
+print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s)
+
+# Separate client/server process:
+# 10*10*100:
+# Raw: 10000 req 1000009 kbytes transfered in 5.39999985695
+# RSA 2048: 10000 req 1000009 kbytes transfered in 27.7890000343 using ('ECDHE-RSA-AES256-SHA', 'TLSv1/SSLv3', 256)
+# ECC: 10000 req 1000009 kbytes transfered in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
+# ECC: 10000 req 1000009 kbytes transfered in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
+#
+# 10*100*10:
+# Raw: 10000 req 1000009 kbytes transfered in 7.02700018883 Mem: 14.328125
+# RSA 2048: 10000 req 1000009 kbytes transfered in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
+# ECC: 10000 req 1000009 kbytes transfered in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
+#
+# 1*100*100:
+# Raw: 10000 req 1000009 kbytes transfered in 4.64400005341 Mem: 14.06640625
+# RSA: 10000 req 1000009 kbytes transfered in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
+# ECC: 10000 req 1000009 kbytes transfered in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
+# AES128: 10000 req 1000009 kbytes transfered in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
+# ECC+128: 10000 req 1000009 kbytes transfered in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
+#
+#
+# Single process:
+# 1*100*100
+# RSA: 10000 req 1000009 kbytes transfered in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
+#
+# 10*10*100
+# RSA: 10000 req 1000009 kbytes transfered in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875
diff --git a/src/Test/Spy.py b/src/Test/Spy.py
new file mode 100644
index 00000000..44422550
--- /dev/null
+++ b/src/Test/Spy.py
@@ -0,0 +1,23 @@
+import logging
+
+class Spy:
+ def __init__(self, obj, func_name):
+ self.obj = obj
+ self.__name__ = func_name
+ self.func_original = getattr(self.obj, func_name)
+ self.calls = []
+
+ def __enter__(self, *args, **kwargs):
+ logging.debug("Spy started")
+ def loggedFunc(cls, *args, **kwargs):
+ call = dict(enumerate(args, 1))
+ call[0] = cls
+ call.update(kwargs)
+ logging.debug("Spy call: %s" % call)
+ self.calls.append(call)
+ return self.func_original(cls, *args, **kwargs)
+ setattr(self.obj, self.__name__, loggedFunc)
+ return self.calls
+
+ def __exit__(self, *args, **kwargs):
+ setattr(self.obj, self.__name__, self.func_original)
\ No newline at end of file
diff --git a/src/Test/TestCached.py b/src/Test/TestCached.py
new file mode 100644
index 00000000..088962c0
--- /dev/null
+++ b/src/Test/TestCached.py
@@ -0,0 +1,59 @@
+import time
+
+from util import Cached
+
+
+class CachedObject:
+ def __init__(self):
+ self.num_called_add = 0
+ self.num_called_multiply = 0
+ self.num_called_none = 0
+
+ @Cached(timeout=1)
+ def calcAdd(self, a, b):
+ self.num_called_add += 1
+ return a + b
+
+ @Cached(timeout=1)
+ def calcMultiply(self, a, b):
+ self.num_called_multiply += 1
+ return a * b
+
+ @Cached(timeout=1)
+ def none(self):
+ self.num_called_none += 1
+ return None
+
+
+class TestCached:
+ def testNoneValue(self):
+ cached_object = CachedObject()
+ assert cached_object.none() is None
+ assert cached_object.none() is None
+ assert cached_object.num_called_none == 1
+ time.sleep(2)
+ assert cached_object.none() is None
+ assert cached_object.num_called_none == 2
+
+ def testCall(self):
+ cached_object = CachedObject()
+
+ assert cached_object.calcAdd(1, 2) == 3
+ assert cached_object.calcAdd(1, 2) == 3
+ assert cached_object.calcMultiply(1, 2) == 2
+ assert cached_object.calcMultiply(1, 2) == 2
+ assert cached_object.num_called_add == 1
+ assert cached_object.num_called_multiply == 1
+
+ assert cached_object.calcAdd(2, 3) == 5
+ assert cached_object.calcAdd(2, 3) == 5
+ assert cached_object.num_called_add == 2
+
+ assert cached_object.calcAdd(1, 2) == 3
+ assert cached_object.calcMultiply(2, 3) == 6
+ assert cached_object.num_called_add == 2
+ assert cached_object.num_called_multiply == 2
+
+ time.sleep(2)
+ assert cached_object.calcAdd(1, 2) == 3
+ assert cached_object.num_called_add == 3
diff --git a/src/Test/TestConfig.py b/src/Test/TestConfig.py
new file mode 100644
index 00000000..24084392
--- /dev/null
+++ b/src/Test/TestConfig.py
@@ -0,0 +1,31 @@
+import pytest
+
+import Config
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestConfig:
+ def testParse(self):
+ # Defaults
+ config_test = Config.Config("zeronet.py".split(" "))
+ config_test.parse(silent=True, parse_config=False)
+ assert not config_test.debug
+ assert not config_test.debug_socket
+
+ # Test parse command line with unknown parameters (ui_password)
+ config_test = Config.Config("zeronet.py --debug --debug_socket --ui_password hello".split(" "))
+ config_test.parse(silent=True, parse_config=False)
+ assert config_test.debug
+ assert config_test.debug_socket
+ with pytest.raises(AttributeError):
+ config_test.ui_password
+
+ # More complex test
+ args = "zeronet.py --unknown_arg --debug --debug_socket --ui_restrict 127.0.0.1 1.2.3.4 "
+ args += "--another_unknown argument --use_openssl False siteSign address privatekey --inner_path users/content.json"
+ config_test = Config.Config(args.split(" "))
+ config_test.parse(silent=True, parse_config=False)
+ assert config_test.debug
+ assert "1.2.3.4" in config_test.ui_restrict
+ assert not config_test.use_openssl
+ assert config_test.inner_path == "users/content.json"
diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py
new file mode 100644
index 00000000..82ee605c
--- /dev/null
+++ b/src/Test/TestConnectionServer.py
@@ -0,0 +1,118 @@
+import time
+import socket
+import gevent
+
+import pytest
+import mock
+
+from Crypt import CryptConnection
+from Connection import ConnectionServer
+from Config import config
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestConnection:
+ def testIpv6(self, file_server6):
+ assert ":" in file_server6.ip
+
+ client = ConnectionServer(file_server6.ip, 1545)
+ connection = client.getConnection(file_server6.ip, 1544)
+
+ assert connection.ping()
+
+ # Close connection
+ connection.close()
+ client.stop()
+ time.sleep(0.01)
+ assert len(file_server6.connections) == 0
+
+        # Should not be able to reach it on the IPv4 address
+ with pytest.raises(socket.error) as err:
+ client = ConnectionServer("127.0.0.1", 1545)
+ connection = client.getConnection("127.0.0.1", 1544)
+
+ def testSslConnection(self, file_server):
+ client = ConnectionServer(file_server.ip, 1545)
+ assert file_server != client
+
+ # Connect to myself
+ with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips
+ connection = client.getConnection(file_server.ip, 1544)
+
+ assert len(file_server.connections) == 1
+ assert connection.handshake
+ assert connection.crypt
+
+
+ # Close connection
+ connection.close("Test ended")
+ client.stop()
+ time.sleep(0.1)
+ assert len(file_server.connections) == 0
+ assert file_server.num_incoming == 2 # One for file_server fixture, one for the test
+
+ def testRawConnection(self, file_server):
+ client = ConnectionServer(file_server.ip, 1545)
+ assert file_server != client
+
+ # Remove all supported crypto
+ crypt_supported_bk = CryptConnection.manager.crypt_supported
+ CryptConnection.manager.crypt_supported = []
+
+ with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips
+ connection = client.getConnection(file_server.ip, 1544)
+ assert len(file_server.connections) == 1
+ assert not connection.crypt
+
+ # Close connection
+ connection.close()
+ client.stop()
+ time.sleep(0.01)
+ assert len(file_server.connections) == 0
+
+ # Reset supported crypts
+ CryptConnection.manager.crypt_supported = crypt_supported_bk
+
+ def testPing(self, file_server, site):
+ client = ConnectionServer(file_server.ip, 1545)
+ connection = client.getConnection(file_server.ip, 1544)
+
+ assert connection.ping()
+
+ connection.close()
+ client.stop()
+
+ def testGetConnection(self, file_server):
+ client = ConnectionServer(file_server.ip, 1545)
+ connection = client.getConnection(file_server.ip, 1544)
+
+ # Get connection by ip/port
+ connection2 = client.getConnection(file_server.ip, 1544)
+ assert connection == connection2
+
+ # Get connection by peerid
+ assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False)
+ connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False)
+ assert connection2 == connection
+
+ connection.close()
+ client.stop()
+
+ def testFloodProtection(self, file_server):
+ whitelist = file_server.whitelist # Save for reset
+ file_server.whitelist = [] # Disable 127.0.0.1 whitelist
+ client = ConnectionServer(file_server.ip, 1545)
+
+ # Only allow 6 connection in 1 minute
+ for reconnect in range(6):
+ connection = client.getConnection(file_server.ip, 1544)
+ assert connection.handshake
+ connection.close()
+
+        # The 7th one will time out
+ with pytest.raises(gevent.Timeout):
+ with gevent.Timeout(0.1):
+ connection = client.getConnection(file_server.ip, 1544)
+
+ # Reset whitelist
+ file_server.whitelist = whitelist
diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py
new file mode 100644
index 00000000..7e7ca1a5
--- /dev/null
+++ b/src/Test/TestContent.py
@@ -0,0 +1,273 @@
+import json
+import time
+import io
+
+import pytest
+
+from Crypt import CryptBitcoin
+from Content.ContentManager import VerifyError, SignError
+from util.SafeRe import UnsafePatternError
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestContent:
+ privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"
+
+ def testInclude(self, site):
+ # Rules defined in parent content.json
+ rules = site.content_manager.getRules("data/test_include/content.json")
+
+ assert rules["signers"] == ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"] # Valid signer
+ assert rules["user_name"] == "test" # Extra data
+ assert rules["max_size"] == 20000 # Max size of files
+ assert not rules["includes_allowed"] # Don't allow more includes
+ assert rules["files_allowed"] == "data.json" # Allowed file pattern
+
+ # Valid signers for "data/test_include/content.json"
+ valid_signers = site.content_manager.getValidSigners("data/test_include/content.json")
+ assert "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" in valid_signers # Extra valid signer defined in parent content.json
+ assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
+ assert len(valid_signers) == 2 # No more
+
+ # Valid signers for "data/users/content.json"
+ valid_signers = site.content_manager.getValidSigners("data/users/content.json")
+ assert "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" in valid_signers # Extra valid signer defined in parent content.json
+ assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers # The site itself
+ assert len(valid_signers) == 2
+
+ # Valid signers for root content.json
+ assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
+
+ def testInlcudeLimits(self, site, crypt_bitcoin_lib):
+ # Data validation
+ res = []
+ data_dict = {
+ "files": {
+ "data.json": {
+ "sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
+ "size": 505
+ }
+ },
+ "modified": time.time()
+ }
+
+ # Normal data
+ data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
+ data_json = json.dumps(data_dict).encode()
+ data = io.BytesIO(data_json)
+ assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
+
+ # Reset
+ del data_dict["signs"]
+
+ # Too large
+ data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json
+ data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ with pytest.raises(VerifyError) as err:
+ site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
+ assert "Include too large" in str(err.value)
+
+ # Reset
+ data_dict["files"]["data.json"]["size"] = 505
+ del data_dict["signs"]
+
+ # Not allowed file
+ data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
+ data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ with pytest.raises(VerifyError) as err:
+ site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
+ assert "File not allowed" in str(err.value)
+
+ # Reset
+ del data_dict["files"]["notallowed.exe"]
+ del data_dict["signs"]
+
+ # Should work again
+ data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
+
+    @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
+    def testSign(self, site, inner_path):
+        # Bad privatekey
+        with pytest.raises(SignError) as err:
+            site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
+        assert "Private key invalid" in str(err.value)
+
+        # Good privatekey
+        content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False)
+        content_old = site.content_manager.contents[inner_path]  # Content before the sign
+        assert not content_old == content  # Timestamp changed
+        assert site.address in content["signs"]  # Used the site's private key to sign
+        if inner_path == "content.json":
+            assert len(content["files"]) == 17
+        elif inner_path == "data/test_include/content.json":  # was "test-include": branch never ran
+            assert len(content["files"]) == 1
+        elif inner_path == "data/users/content.json":
+            assert len(content["files"]) == 0
+
+        # Everything should be same as before except the modified timestamp and the signs
+        assert (
+            {key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
+            ==
+            {key: val for key, val in content.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
+        )
+
+ def testSignOptionalFiles(self, site):
+ for hash in list(site.content_manager.hashfield):
+ site.content_manager.hashfield.remove(hash)
+
+ assert len(site.content_manager.hashfield) == 0
+
+ site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
+ content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
+
+ del site.content_manager.contents["content.json"]["optional"]
+ content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)
+
+ assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern
+ assert len(content_optional["files_optional"]) > 0
+ assert len(site.content_manager.hashfield) == len(content_optional["files_optional"]) # Hashed optional files should be added to hashfield
+ assert len(content_nooptional["files"]) > len(content_optional["files"])
+
+ def testFileInfo(self, site):
+ assert "sha512" in site.content_manager.getFileInfo("index.html")
+ assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json"
+ assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json"
+ assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json"
+ assert not site.content_manager.getFileInfo("notexist")
+
+ # Optional file
+ file_info_optional = site.content_manager.getFileInfo("data/optional.txt")
+ assert "sha512" in file_info_optional
+ assert file_info_optional["optional"] is True
+
+ # Not exists yet user content.json
+ assert "cert_signers" in site.content_manager.getFileInfo("data/users/unknown/content.json")
+
+ # Optional user file
+ file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ assert "sha512" in file_info_optional
+ assert file_info_optional["optional"] is True
+
+ def testVerify(self, site, crypt_bitcoin_lib):
+ inner_path = "data/test_include/content.json"
+ data_dict = site.storage.loadJson(inner_path)
+ data = io.BytesIO(json.dumps(data_dict).encode("utf8"))
+
+ # Re-sign
+ data_dict["signs"] = {
+ "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
+ }
+ assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
+
+ # Wrong address
+ data_dict["address"] = "Othersite"
+ del data_dict["signs"]
+ data_dict["signs"] = {
+ "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
+ }
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ with pytest.raises(VerifyError) as err:
+ site.content_manager.verifyFile(inner_path, data, ignore_same=False)
+ assert "Wrong site address" in str(err.value)
+
+ # Wrong inner_path
+ data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
+ data_dict["inner_path"] = "content.json"
+ del data_dict["signs"]
+ data_dict["signs"] = {
+ "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
+ }
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ with pytest.raises(VerifyError) as err:
+ site.content_manager.verifyFile(inner_path, data, ignore_same=False)
+ assert "Wrong inner_path" in str(err.value)
+
+ # Everything right again
+ data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
+ data_dict["inner_path"] = inner_path
+ del data_dict["signs"]
+ data_dict["signs"] = {
+ "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
+ }
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
+
+ def testVerifyInnerPath(self, site, crypt_bitcoin_lib):
+ inner_path = "content.json"
+ data_dict = site.storage.loadJson(inner_path)
+
+ for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]:
+ data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}
+
+ if "sign" in data_dict:
+ del data_dict["sign"]
+ del data_dict["signs"]
+ data_dict["signs"] = {
+ "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
+ }
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)
+
+ for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]:
+ data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}
+
+ if "sign" in data_dict:
+ del data_dict["sign"]
+ del data_dict["signs"]
+ data_dict["signs"] = {
+ "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
+ }
+ data = io.BytesIO(json.dumps(data_dict).encode())
+ with pytest.raises(VerifyError) as err:
+ site.content_manager.verifyFile(inner_path, data, ignore_same=False)
+ assert "Invalid relative path" in str(err.value)
+
+ @pytest.mark.parametrize("key", ["ignore", "optional"])
+ def testSignUnsafePattern(self, site, key):
+ site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*"
+ with pytest.raises(UnsafePatternError) as err:
+ site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False)
+ assert "Potentially unsafe" in str(err.value)
+
+
+ def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib):
+ site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*"
+ with pytest.raises(UnsafePatternError) as err:
+ with site.storage.open("data/test_include/content.json") as data:
+ site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
+ assert "Potentially unsafe" in str(err.value)
+
+ site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0}
+ with pytest.raises(UnsafePatternError) as err:
+ with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data:
+ site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False)
+ assert "Potentially unsafe" in str(err.value)
+
+ def testPathValidation(self, site):
+ assert site.content_manager.isValidRelativePath("test.txt")
+ assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt")
+ assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt")
+ assert site.content_manager.isValidRelativePath("тест.текст")
+ assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆")
+
+ # Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names
+
+ assert not site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed
+ assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with /
+ assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \
+ assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. in path
+ assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character
+ assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters)
+ assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space
+ assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot
+ assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot
+ assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows
+ assert not site.content_manager.isValidRelativePath("CON/any.txt")
+ assert not site.content_manager.isValidRelativePath("any/lpt1.txt")
+ assert site.content_manager.isValidRelativePath("any/CONAN")
+ assert not site.content_manager.isValidRelativePath("any/CONOUT$")
+ assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed
diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py
new file mode 100644
index 00000000..8e91dd3e
--- /dev/null
+++ b/src/Test/TestContentUser.py
@@ -0,0 +1,390 @@
+import json
+import io
+
+import pytest
+
+from Crypt import CryptBitcoin
+from Content.ContentManager import VerifyError, SignError
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestContentUser:
    def testSigners(self, site):
        """getFileInfo/getValidSigners for not-yet-existing and existing user content files."""
        # File info for a not yet existing user file
        file_info = site.content_manager.getFileInfo("data/users/notexist/data.json")
        assert file_info["content_inner_path"] == "data/users/notexist/content.json"
        file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json")
        assert file_info["content_inner_path"] == "data/users/notexist/content.json"
        valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json")
        assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]

        # Valid signers for an existing user file
        valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
        assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers  # The site address
        assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers  # Admin user defined in data/users/content.json
        assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers  # The user itself
        assert len(valid_signers) == 3  # No more valid signers

        # Valid signers for a banned user: the user's own address is removed
        user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
        user_content["cert_user_id"] = "bad@zeroid.bit"

        valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers  # The site address
        assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers  # Admin user defined in data/users/content.json
        assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers  # The user itself
+
    def testRules(self, site):
        """getRules size limits/signers per cert_user_id and cert_auth_type (known, unknown, bitmsg, banned)."""
        # We manipulate this dict to exercise the rules defined in data/users/content.json
        user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")

        # Known user
        user_content["cert_auth_type"] = "web"
        user_content["cert_user_id"] = "nofish@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["max_size"] == 100000
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]

        # Unknown user
        user_content["cert_auth_type"] = "web"
        user_content["cert_user_id"] = "noone@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["max_size"] == 10000
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]

        # User with more size limit based on auth type
        user_content["cert_auth_type"] = "bitmsg"
        user_content["cert_user_id"] = "noone@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["max_size"] == 15000
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]

        # Banned user
        user_content["cert_auth_type"] = "web"
        user_content["cert_user_id"] = "bad@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"]
+
    def testRulesAddress(self, site):
        """Per-address permission overrides in data/users/content.json: ban and max_size change."""
        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
        user_content = site.storage.loadJson(user_inner_path)

        rules = site.content_manager.getRules(user_inner_path, user_content)
        assert rules["max_size"] == 10000
        assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"]

        users_content = site.content_manager.contents["data/users/content.json"]

        # Ban user based on address
        users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False
        rules = site.content_manager.getRules(user_inner_path, user_content)
        assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"]

        # Change max allowed size
        users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
        rules = site.content_manager.getRules(user_inner_path, user_content)
        assert rules["max_size"] == 20000
+
    def testVerifyAddress(self, site):
        """verifyFile size enforcement for an address-keyed user file, including per-address override."""
        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
        data_dict = site.storage.loadJson(user_inner_path)
        users_content = site.content_manager.contents["data/users/content.json"]

        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

        # Test error on 15k data.json (over the default 10k limit)
        data_dict["files"]["data.json"]["size"] = 1024 * 15
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Include too large" in str(err.value)

        # Give more space based on address
        users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
+
    def testVerify(self, site):
        """verifyFile rule enforcement: max_size, optional-file size, optional-file patterns, includes ban."""
        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
        data_dict = site.storage.loadJson(user_inner_path)
        users_content = site.content_manager.contents["data/users/content.json"]

        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

        # Test max size exception by setting allowed to 0
        rules = site.content_manager.getRules(user_inner_path, data_dict)
        assert rules["max_size"] == 10000
        assert users_content["user_contents"]["permission_rules"][".*"]["max_size"] == 10000

        users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0
        rules = site.content_manager.getRules(user_inner_path, data_dict)
        assert rules["max_size"] == 0
        data = io.BytesIO(json.dumps(data_dict).encode())

        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Include too large" in str(err.value)
        users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000  # Reset

        # Test max optional size exception
        # 1 MB gif = Allowed
        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

        # 100 MB gif = Not allowed
        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 100 * 1024 * 1024
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Include optional files too large" in str(err.value)
        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024  # Reset

        # hello.exe = Not allowed (does not match the allowed optional file pattern)
        data_dict["files_optional"]["hello.exe"] = data_dict["files_optional"]["peanut-butter-jelly-time.gif"]
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Optional file not allowed" in str(err.value)
        del data_dict["files_optional"]["hello.exe"]  # Reset

        # Includes not allowed in user content
        data_dict["includes"] = {"other.json": {}}
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Includes not allowed" in str(err.value)
+
    def testCert(self, site):
        """Cert workflow: sign/verify a valid cert, reject wrong-address and invalid certs,
        reject banned users, but accept banned-user content when signed by the site owner."""
        # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"
        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
        # cert_addr = "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"

        # Check if the user file is loaded
        assert "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" in site.content_manager.contents
        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
        rules_content = site.content_manager.contents["data/users/content.json"]

        # Override valid cert signers for the test
        rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
            "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
            "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
        ]

        # Check valid cert signers
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["cert_signers"] == {"zeroid.bit": [
            "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
            "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
        ]}

        # Sign a valid cert (payload format: "<user_addr>#<auth_type>/<username>")
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            user_content["cert_user_id"].split("@")[0]
        ), cert_priv)

        # Verify cert
        assert site.content_manager.verifyCert("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)

        # The cert must not validate for a different address
        assert not site.content_manager.verifyCert("data/users/badaddress/content.json", user_content)

        # Sign user content
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )

        # Test user cert
        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
        )

        # Test banned user
        cert_user_id = user_content["cert_user_id"]  # My username
        site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Valid signs: 0/1" in str(err.value)
        del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id]  # Reset

        # Test invalid cert (signed for the wrong address)
        user_content["cert_sign"] = CryptBitcoin.sign(
            "badaddress#%s/%s" % (user_content["cert_auth_type"], user_content["cert_user_id"]), cert_priv
        )
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid cert" in str(err.value)

        # Test banned user, signed by the site owner (owner signature bypasses the ban)
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            user_content["cert_user_id"].split("@")[0]
        ), cert_priv)
        cert_user_id = user_content["cert_user_id"]  # My username
        site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False

        site_privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        del user_content["signs"]  # Remove signs before signing
        user_content["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), site_privatekey)
        }
        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(user_content).encode()), ignore_same=False
        )
+
    def testMissingCert(self, site):
        """verifyFile rejects a missing or malformed cert_user_id on user content."""
        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"

        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
        rules_content = site.content_manager.contents["data/users/content.json"]

        # Override valid cert signers for the test
        rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
            "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
            "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
        ]

        # Sign a valid cert
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            user_content["cert_user_id"].split("@")[0]
        ), cert_priv)
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )

        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
        )

        # Test invalid cert_user_id (no @domain part)
        user_content["cert_user_id"] = "nodomain"
        user_content["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
        }
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid domain in cert_user_id" in str(err.value)

        # Test removed cert
        del user_content["cert_user_id"]
        del user_content["cert_auth_type"]
        del user_content["signs"]  # Remove signs before signing
        user_content["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
        }
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Missing cert_user_id" in str(err.value)
+
+
    def testCertSignersPattern(self, site):
        """cert_signers_pattern regex: match accepts, mismatch rejects, removal rejects."""
        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"  # For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet

        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
        rules_content = site.content_manager.contents["data/users/content.json"]

        # Override valid cert signer pattern for the test
        rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]"

        # Sign a valid cert
        user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            "certuser"
        ), cert_priv)
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )

        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
        )

        # Cert does not match the pattern
        rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]"

        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)

        # Removed cert_signers_pattern: signer is no longer accepted
        del rules_content["user_contents"]["cert_signers_pattern"]

        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)
+
+
    def testNewFile(self, site):
        """Signing a brand-new user content file keeps the written data intact."""
        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"

        site.storage.writeJson(inner_path, {"test": "data"})
        site.content_manager.sign(inner_path, privatekey)
        assert "test" in site.storage.loadJson(inner_path)  # sign() must preserve existing keys

        site.storage.delete(inner_path)  # Clean up
diff --git a/src/Test/TestCryptBitcoin.py b/src/Test/TestCryptBitcoin.py
new file mode 100644
index 00000000..2bc087b5
--- /dev/null
+++ b/src/Test/TestCryptBitcoin.py
@@ -0,0 +1,48 @@
+from Crypt import CryptBitcoin
+
+
class TestCryptBitcoin:
    """Tests for the pluggable bitcoin crypto backends (crypt_bitcoin_lib fixture)."""

    def testSign(self, crypt_bitcoin_lib):
        """Sign/verify roundtrip over many payload sizes, plus wrong-key rejection."""
        privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C"
        privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC"

        # Get address by privatekey
        address = crypt_bitcoin_lib.privatekeyToAddress(privatekey)
        assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"

        address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad)
        assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"

        # Text signing: 0..290 chars in steps of 10, then some large payloads
        data_len_list = list(range(0, 300, 10))
        data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048]
        for data_len in data_len_list:
            data = data_len * "!"
            sign = crypt_bitcoin_lib.sign(data, privatekey)

            assert crypt_bitcoin_lib.verify(data, address, sign)
            assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign)

        # Signed by bad privatekey
        sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad)
        assert not crypt_bitcoin_lib.verify("hello", address, sign_bad)

    def testVerify(self, crypt_bitcoin_lib):
        """Verify fixed signatures produced by uncompressed and compressed keys."""
        sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
        assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed)

        sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
        assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed)

    def testNewPrivatekey(self):
        """Generated private keys are unique and convert to an address."""
        assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey()
        assert CryptBitcoin.privatekeyToAddress(CryptBitcoin.newPrivatekey())

    def testNewSeed(self):
        """Generated seeds are unique; HD keys derive valid addresses (incl. huge child index)."""
        assert CryptBitcoin.newSeed() != CryptBitcoin.newSeed()
        assert CryptBitcoin.privatekeyToAddress(
            CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 0)
        )
        assert CryptBitcoin.privatekeyToAddress(
            CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 2**256)
        )
diff --git a/src/Test/TestCryptConnection.py b/src/Test/TestCryptConnection.py
new file mode 100644
index 00000000..46d2affc
--- /dev/null
+++ b/src/Test/TestCryptConnection.py
@@ -0,0 +1,23 @@
+import os
+
+from Config import config
+from Crypt import CryptConnection
+
+
class TestCryptConnection:
    def testSslCert(self):
        """Regenerating the RSA TLS cert/key pair and selecting a supported crypt."""
        cert_path = "%s/cert-rsa.pem" % config.data_dir
        key_path = "%s/key-rsa.pem" % config.data_dir

        # Remove old certs so loadCerts() has to regenerate them
        for file_path in (cert_path, key_path):
            if os.path.isfile(file_path):
                os.unlink(file_path)

        # Generate certs
        CryptConnection.manager.loadCerts()

        assert "tls-rsa" in CryptConnection.manager.crypt_supported
        assert CryptConnection.manager.selectCrypt(["tls-rsa", "unknown"]) == "tls-rsa"  # It should choose the known crypt

        # Check openssl cert generation
        assert os.path.isfile(cert_path)
        assert os.path.isfile(key_path)
diff --git a/src/Test/TestCryptHash.py b/src/Test/TestCryptHash.py
new file mode 100644
index 00000000..b91dbcca
--- /dev/null
+++ b/src/Test/TestCryptHash.py
@@ -0,0 +1,31 @@
+import base64
+
+from Crypt import CryptHash
+
# Expected digests of data/dbschema.json in the test site fixture
sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d"
sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d"
sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3"


class TestCryptBitcoin:  # NOTE(review): name looks copy-pasted from TestCryptBitcoin.py; kept for backward compatibility

    def testSha(self, site):
        """sha512sum/sha256sum accept both a path and a file object; Sha512t supports incremental update()."""
        file_path = site.storage.getPath("dbschema.json")
        assert CryptHash.sha512sum(file_path) == sha512t_sum_hex
        # Use context managers so file handles do not leak (the original left them open)
        with open(file_path, "rb") as f:
            assert CryptHash.sha512sum(f) == sha512t_sum_hex
        with open(file_path, "rb") as f:
            assert CryptHash.sha512sum(f, format="digest") == sha512t_sum_bin

        assert CryptHash.sha256sum(file_path) == sha256_sum_hex
        with open(file_path, "rb") as f:
            assert CryptHash.sha256sum(f) == sha256_sum_hex

        with open(file_path, "rb") as f:
            partial_hash = CryptHash.Sha512t(f.read(100))
            # Bug fix: the original line was a bare comparison with no "assert", so it checked nothing
            assert partial_hash.hexdigest() != sha512t_sum_hex  # Partial data must not match yet
            partial_hash.update(f.read(1024 * 1024))
            assert partial_hash.hexdigest() == sha512t_sum_hex  # Full data matches

    def testRandom(self):
        """random() respects the requested length, is non-repeating, and encodings decode cleanly."""
        assert len(CryptHash.random(64)) == 64
        assert CryptHash.random() != CryptHash.random()
        assert bytes.fromhex(CryptHash.random(encoding="hex"))
        assert base64.b64decode(CryptHash.random(encoding="base64"))
diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py
new file mode 100644
index 00000000..67f383a3
--- /dev/null
+++ b/src/Test/TestDb.py
@@ -0,0 +1,137 @@
+import io
+
+
+class TestDb:
    def testCheckTables(self, db):
        """Schema tables exist after init; checkTables() creates a table newly added to db.schema."""
        tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
        assert "keyvalue" in tables  # To store simple key -> value
        assert "json" in tables  # Json file path registry
        assert "test" in tables  # The table defined in dbschema.json

        # Verify test table
        cols = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
        assert "test_id" in cols
        assert "title" in cols

        # Add new table
        assert "newtest" not in tables
        db.schema["tables"]["newtest"] = {
            "cols": [
                ["newtest_id", "INTEGER"],
                ["newtitle", "TEXT"],
            ],
            "indexes": ["CREATE UNIQUE INDEX newtest_id ON newtest(newtest_id)"],
            "schema_changed": 1426195822
        }
        db.checkTables()
        tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
        assert "test" in tables
        assert "newtest" in tables
+
    def testQueries(self, db):
        """The dict-based "?" query builder and named :params, incl. lists, not__/like__ prefixes."""
        # Test insert
        for i in range(100):
            db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test #%s" % i})

        assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 100

        # Test single select
        assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1

        # Test multiple select (list value expands to IN, multiple keys are ANDed)
        assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"test_id": [1, 2, 3], "title": "Test #2"}
        ).fetchone()["num"] == 1
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
        ).fetchone()["num"] == 2

        # Test multiple select using named params
        assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title",
            {"test_id": [1, 2, 3], "title": "Test #2"}
        ).fetchone()["num"] == 1
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title IN :title",
            {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
        ).fetchone()["num"] == 2

        # Large amount of IN values (past SQLite's bound-parameter limit)
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"not__test_id": list(range(2, 3000))}
        ).fetchone()["num"] == 2
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"test_id": list(range(50, 3000))}
        ).fetchone()["num"] == 50

        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"not__title": ["Test #%s" % i for i in range(50, 3000)]}
        ).fetchone()["num"] == 50

        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"title__like": "%20%"}
        ).fetchone()["num"] == 1

        # Test named parameter escaping
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike",
            {"test_id": 1, "titlelike": "Test%"}
        ).fetchone()["num"] == 1
+
    def testEscaping(self, db):
        """Values containing quote characters survive insert and dict-based WHERE matching."""
        # Test insert
        for i in range(100):
            db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i})

        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"title": "Test '\" #1"}
        ).fetchone()["num"] == 1

        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"title": ["Test '\" #%s" % i for i in range(0, 50)]}
        ).fetchone()["num"] == 50

        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]}
        ).fetchone()["num"] == 50
+
+
    def testUpdateJson(self, db):
        """updateJson imports mapped keys from a json file and ignores unmapped columns."""
        f = io.BytesIO()
        f.write("""
            {
                "test": [
                    {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
                ]
            }
        """.encode())
        f.seek(0)
        assert db.updateJson(db.db_dir + "data.json", f) is True
        assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1
        assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1
+
    def testUnsafePattern(self, db):
        """updateJson refuses a potentially catastrophic (ReDoS-prone) map pattern and imports nothing."""
        db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]}  # Only repetition of . supported
        # NOTE(review): uses StringIO here while testUpdateJson uses BytesIO — presumably updateJson
        # accepts both; confirm against Db.updateJson before unifying
        f = io.StringIO()
        f.write("""
            {
                "test": [
                    {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
                ]
            }
        """)
        f.seek(0)
        assert db.updateJson(db.db_dir + "data.json", f) is False
        assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0
        assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0
diff --git a/src/Test/TestDbQuery.py b/src/Test/TestDbQuery.py
new file mode 100644
index 00000000..597bc950
--- /dev/null
+++ b/src/Test/TestDbQuery.py
@@ -0,0 +1,31 @@
+import re
+
+from Db.DbQuery import DbQuery
+
+
class TestDbQuery:
    def testParse(self):
        """DbQuery parses a SELECT into parts/fields, re-renders it, and accepts added WHERE clauses."""
        query_text = """
            SELECT
             'comment' AS type,
             date_added, post.title AS title,
             keyvalue.value || ': ' || comment.body AS body,
             '?Post:' || comment.post_id || '#Comments' AS url
            FROM
             comment
             LEFT JOIN json USING (json_id)
             LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json')
             LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id')
             LEFT JOIN post ON (comment.post_id = post.post_id)
            WHERE
             post.date_added > 123
            ORDER BY
             date_added DESC
            LIMIT 20
        """
        query = DbQuery(query_text)
        assert query.parts["LIMIT"] == "20"
        assert query.fields["body"] == "keyvalue.value || ': ' || comment.body"
        # Round-trip comparison ignores whitespace, so str(query) need not match byte-for-byte
        assert re.sub("[ \r\n]", "", str(query)) == re.sub("[ \r\n]", "", query_text)
        query.wheres.append("body LIKE '%hello%'")
        assert "body LIKE '%hello%'" in str(query)
diff --git a/src/Test/TestDebug.py b/src/Test/TestDebug.py
new file mode 100644
index 00000000..e3eb20b3
--- /dev/null
+++ b/src/Test/TestDebug.py
@@ -0,0 +1,52 @@
+from Debug import Debug
+import gevent
+import os
+import re
+
+import pytest
+
+
class TestDebug:
    """Tests for traceback/stack formatting helpers in Debug."""

    @pytest.mark.parametrize("items,expected", [
        (["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]),  # basic test
        (["@/src/Db/Db.py:17"], ["Db.py line 17"]),  # path compression
        (["%s:1" % __file__], ["TestDebug.py line 1"]),
        (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]),  # plugins
        (["@/main.py:17"], ["main.py line 17"]),  # root
        (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]),  # Windows paths
        ([":1"], []),  # importlib builtins
        ([":1"], []),  # importlib builtins (NOTE(review): duplicate of the previous case — intentional?)
        (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]),  # best-effort anonymization
        (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]),
        (["/root/main.py:17"], ["/root/main.py line 17"]),
        (["{gevent}:13"], ["/__init__.py line 13"]),  # modules
        (["{os}:13"], ["<os> line 13"]),  # python builtin modules
        (["src/gevent/event.py:17"], ["<gevent>/event.py line 17"]),  # gevent-overriden __file__
        (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]),  # multiple args
        (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]),  # same file
        (["{os}:1", "@/src/Db/Db.py:17"], ["<os> line 1", "Db.py line 17"]),  # builtins
        (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["<gevent>/__init__.py line 1", "...", "Db.py line 17"])
    ])
    def testFormatTraceback(self, items, expected):
        """Each "file:line" item is converted to a real path and formatted by formatTraceback."""
        q_items = []
        for item in items:
            file, line = item.rsplit(":", 1)
            if file.startswith("@"):
                file = Debug.root_dir + file[1:]
            file = file.replace("{os}", os.__file__)
            file = file.replace("{gevent}", gevent.__file__)
            q_items.append((file, int(line)))
        assert Debug.formatTraceback(q_items) == expected

    def testFormatException(self):
        """formatException renders the active exception with file/line info."""
        try:
            raise ValueError("Test exception")
        except Exception:
            assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException())
        try:
            os.path.abspath(1)
        except Exception:
            assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException())

    def testFormatStack(self):
        """formatStack starts with the current frame then the pytest caller frame."""
        assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack())
diff --git a/src/Test/TestDiff.py b/src/Test/TestDiff.py
new file mode 100644
index 00000000..622951a1
--- /dev/null
+++ b/src/Test/TestDiff.py
@@ -0,0 +1,58 @@
+import io
+
+from util import Diff
+
+
class TestDiff:
    """Tests for util.Diff: actions are ("=", byte_count), ("-", byte_count) or ("+", [lines])."""

    def testDiff(self):
        assert Diff.diff(
            [],
            ["one", "two", "three"]
        ) == [("+", ["one", "two", "three"])]

        assert Diff.diff(
            ["one", "two", "three"],
            ["one", "two", "three", "four", "five"]
        ) == [("=", 11), ("+", ["four", "five"])]

        assert Diff.diff(
            ["one", "two", "three", "six"],
            ["one", "two", "three", "four", "five", "six"]
        ) == [("=", 11), ("+", ["four", "five"]), ("=", 3)]

        assert Diff.diff(
            ["one", "two", "three", "hmm", "six"],
            ["one", "two", "three", "four", "five", "six"]
        ) == [("=", 11), ("-", 3), ("+", ["four", "five"]), ("=", 3)]

        assert Diff.diff(
            ["one", "two", "three"],
            []
        ) == [("-", 11)]

    def testUtf8(self):
        # NOTE(review): these are str escape sequences (Latin-1 code points), not decoded UTF-8 text;
        # the "=" count (20) reflects their length — confirm against Diff's byte handling
        assert Diff.diff(
            ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"],
            ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"]
        ) == [("=", 20), ("+", ["four", "five"])]

    def testDiffLimit(self):
        """diff() returns actions within the limit, False once the diff would exceed it."""
        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
        actions = Diff.diff(list(old_f), list(new_f), limit=1024)
        assert actions

        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix" * 1024)
        actions = Diff.diff(list(old_f), list(new_f), limit=1024)
        assert actions is False

    def testPatch(self):
        """patch() applied to the old file reproduces the new file exactly."""
        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
        actions = Diff.diff(
            list(old_f),
            list(new_f)
        )
        old_f.seek(0)
        assert Diff.patch(old_f, actions).getvalue() == new_f.getvalue()
diff --git a/src/Test/TestEvent.py b/src/Test/TestEvent.py
new file mode 100644
index 00000000..8bdafaaa
--- /dev/null
+++ b/src/Test/TestEvent.py
@@ -0,0 +1,65 @@
+import util
+
+
class ExampleClass(object):
    """Tiny observable used by TestEvent: records every notification it gets."""

    def __init__(self):
        self.called = []  # titles recorded by increment(), in call order
        self.onChanged = util.Event()  # handlers fire when this is invoked

    def increment(self, title):
        """Record *title* as one observed call."""
        self.called.append(title)
+
+
class TestEvent:
    """util.Event: append() handlers repeat, once() handlers fire a single time."""

    def testEvent(self):
        obj = ExampleClass()
        obj.onChanged.append(lambda: obj.increment("Called #1"))
        obj.onChanged.append(lambda: obj.increment("Called #2"))
        obj.onChanged.once(lambda: obj.increment("Once"))

        assert obj.called == []
        obj.onChanged()
        assert obj.called == ["Called #1", "Called #2", "Once"]

        # The once() handler drops out after the first firing; the others repeat
        obj.onChanged()
        obj.onChanged()
        assert obj.called == ["Called #1", "Called #2", "Once", "Called #1", "Called #2", "Called #1", "Called #2"]

    def testOnce(self):
        obj = ExampleClass()
        obj.onChanged.once(lambda: obj.increment("Once test #1"))

        assert obj.called == []
        obj.onChanged()
        assert obj.called == ["Once test #1"]

        # Subsequent firings must not call it again
        obj.onChanged()
        obj.onChanged()
        assert obj.called == ["Once test #1"]

    def testOnceMultiple(self):
        obj = ExampleClass()
        # Several anonymous once() handlers may be queued side by side
        obj.onChanged.once(lambda: obj.increment("Once test #1"))
        obj.onChanged.once(lambda: obj.increment("Once test #2"))
        obj.onChanged.once(lambda: obj.increment("Once test #3"))

        assert obj.called == []
        obj.onChanged()
        assert obj.called == ["Once test #1", "Once test #2", "Once test #3"]
        obj.onChanged()
        obj.onChanged()
        assert obj.called == ["Once test #1", "Once test #2", "Once test #3"]

    def testOnceNamed(self):
        obj = ExampleClass()
        # once() handlers registered under the same name: only the first is kept
        obj.onChanged.once(lambda: obj.increment("Once test #1/1"), "type 1")
        obj.onChanged.once(lambda: obj.increment("Once test #1/2"), "type 1")
        obj.onChanged.once(lambda: obj.increment("Once test #2"), "type 2")

        assert obj.called == []
        obj.onChanged()
        assert obj.called == ["Once test #1/1", "Once test #2"]
        obj.onChanged()
        obj.onChanged()
        assert obj.called == ["Once test #1/1", "Once test #2"]
diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py
new file mode 100644
index 00000000..3fabc271
--- /dev/null
+++ b/src/Test/TestFileRequest.py
@@ -0,0 +1,124 @@
+import io
+
+import pytest
+import time
+
+from Connection import ConnectionServer
+from Connection import Connection
+from File import FileServer
+
+
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestFileRequest:
    """End-to-end tests of the getFile / streamFile / pex protocol commands
    against a live local FileServer (fixtures supply server and site)."""

    def testGetFile(self, file_server, site):
        file_server.ip_incoming = {}  # Reset flood protection
        client = ConnectionServer(file_server.ip, 1545)

        connection = client.getConnection(file_server.ip, 1544)
        file_server.sites[site.address] = site

        # Normal request
        response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
        assert b"sign" in response["body"]

        # Same request with the expected file size supplied
        response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
        assert b"sign" in response["body"]

        # Invalid file
        response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
        assert "File read error" in response["error"]

        # Location over size
        response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
        assert "File read error" in response["error"]

        # Stream from parent dir (path traversal must be refused)
        response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
        assert "File read exception" in response["error"]

        # Invalid site
        response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
        assert "Unknown site" in response["error"]

        response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
        assert "Unknown site" in response["error"]

        # Invalid size
        response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234})
        assert "File size does not match" in response["error"]

        # Invalid path: each traversal variant, with both / and \ separators
        for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
            for sep in ["/", "\\"]:
                response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0})
                assert response["error"] == 'File read exception'

        connection.close()
        client.stop()

    def testStreamFile(self, file_server, site):
        file_server.ip_incoming = {}  # Reset flood protection
        client = ConnectionServer(file_server.ip, 1545)
        connection = client.getConnection(file_server.ip, 1544)
        file_server.sites[site.address] = site

        # Normal stream: file content arrives in the supplied buffer
        buff = io.BytesIO()
        response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
        assert "stream_bytes" in response
        assert b"sign" in buff.getvalue()

        # Invalid file
        buff = io.BytesIO()
        response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
        assert "File read error" in response["error"]

        # Location over size
        buff = io.BytesIO()
        response = connection.request(
            "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
        )
        assert "File read error" in response["error"]

        # Stream from parent dir (path traversal must be refused)
        buff = io.BytesIO()
        response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
        assert "File read exception" in response["error"]

        connection.close()
        client.stop()

    def testPex(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Add new fake peer to site
        fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
        # Give it a fake live connection so it counts as connectable
        fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
        fake_peer.connection.last_recv_time = time.time()
        assert fake_peer in site.getConnectablePeers()

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        # pex() must bring the fake peer over to site_temp
        assert "%s:11337" % file_server.ip_external not in site_temp.peers
        assert peer_file_server.pex()
        assert "%s:11337" % file_server.ip_external in site_temp.peers

        # Should not exchange private peers from local network
        fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
        assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
        fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
        fake_peer_private.connection.last_recv_time = time.time()

        assert "192.168.0.1:11337" not in site_temp.peers
        assert not peer_file_server.pex()
        assert "192.168.0.1:11337" not in site_temp.peers

        connection.close()
        client.stop()
diff --git a/src/Test/TestFlag.py b/src/Test/TestFlag.py
new file mode 100644
index 00000000..12fd8165
--- /dev/null
+++ b/src/Test/TestFlag.py
@@ -0,0 +1,39 @@
+import os
+
+import pytest
+
+from util.Flag import Flag
+
class TestFlag:
    """util.Flag decorators register flag names in flag.db keyed by function name."""

    def testFlagging(self):
        flags = Flag()

        @flags.admin
        @flags.no_multiuser
        def testFn(value):
            return value

        # Both decorators must be recorded under the function's name
        assert "admin" in flags.db["testFn"]
        assert "no_multiuser" in flags.db["testFn"]

    def testSubclassedFlagging(self):
        flags = Flag()

        class Test:
            @flags.admin
            @flags.no_multiuser
            def testFn(value):
                return value

        class SubTest(Test):
            pass

        # Flags are stored by method name, so subclassing needs no re-flagging
        assert "admin" in flags.db["testFn"]
        assert "no_multiuser" in flags.db["testFn"]

    def testInvalidFlag(self):
        flags = Flag()
        # An unknown flag attribute must raise instead of being silently stored
        with pytest.raises(Exception) as err:
            @flags.no_multiuser
            @flags.unknown_flag
            def testFn(value):
                return value
        assert "Invalid flag" in str(err.value)
diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py
new file mode 100644
index 00000000..07644ec0
--- /dev/null
+++ b/src/Test/TestHelper.py
@@ -0,0 +1,79 @@
+import socket
+import struct
+import os
+
+import pytest
+from util import helper
+from Config import config
+
+
@pytest.mark.usefixtures("resetSettings")
class TestHelper:
    """Unit tests for the small pure helpers in util.helper."""

    def testShellquote(self):
        assert helper.shellquote("hel'lo") == "\"hel'lo\""  # Single quotes are preserved
        assert helper.shellquote('hel"lo') == '"hello"'  # Double quotes are stripped
        assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"')

    def testPackAddress(self):
        for port in [1, 1000, 65535]:
            # IPv4 packs into 6 bytes and must round-trip
            for ip in ["1.1.1.1", "127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]:
                packed = helper.packAddress(ip, port)
                assert len(packed) == 6
                assert helper.unpackAddress(packed) == (ip, port)

            # IPv6 packs into 18 bytes and must round-trip
            for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]:
                packed = helper.packAddress(ip, port)
                assert len(packed) == 18
                assert helper.unpackAddress(packed) == (ip, port)

            # .onion addresses use their own 12-byte packing
            packed_onion = helper.packOnionAddress("boot3rdez4rzn36x.onion", port)
            assert len(packed_onion) == 12
            assert helper.unpackOnionAddress(packed_onion) == ("boot3rdez4rzn36x.onion", port)

        with pytest.raises(struct.error):
            helper.packAddress("1.1.1.1", 100000)  # Port out of 16-bit range

        with pytest.raises(socket.error):
            helper.packAddress("999.1.1.1", 1)  # Not a valid IP

        with pytest.raises(Exception):
            helper.unpackAddress("X")  # Too short to be a packed address

    def testGetDirname(self):
        expected_dirnames = {
            "data/users/content.json": "data/users/",
            "data/users": "data/",
            "": "",
            "content.json": "",
            "data/users/": "data/users/",
            "/data/users/content.json": "data/users/",  # Leading slash is dropped
        }
        for path, expected in expected_dirnames.items():
            assert helper.getDirname(path) == expected

    def testGetFilename(self):
        expected_filenames = {
            "data/users/content.json": "content.json",
            "data/users": "users",
            "": "",
            "content.json": "content.json",
            "data/users/": "",
            "/data/users/content.json": "content.json",
        }
        for path, expected in expected_filenames.items():
            assert helper.getFilename(path) == expected

    def testIsIp(self):
        for valid in ["1.2.3.4", "255.255.255.255"]:
            assert helper.isIp(valid)
        for invalid in ["any.host", "1.2.3.4.com", "1.2.3.4.any.host"]:
            assert not helper.isIp(invalid)

    def testIsPrivateIp(self):
        assert helper.isPrivateIp("192.168.1.1")
        assert not helper.isPrivateIp("1.1.1.1")
        assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c")
        assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c")  # cjdns

    def testOpenLocked(self):
        locked_f = helper.openLocked(config.data_dir + "/locked.file")
        assert locked_f
        # Opening the same file again must fail while the lock is held
        with pytest.raises(BlockingIOError):
            helper.openLocked(config.data_dir + "/locked.file")
        # A different file is unaffected by the lock
        locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file")

        locked_f.close()
        locked_f_different.close()

        os.unlink(locked_f.name)
        os.unlink(locked_f_different.name)
diff --git a/src/Test/TestMsgpack.py b/src/Test/TestMsgpack.py
new file mode 100644
index 00000000..5a0b6d4d
--- /dev/null
+++ b/src/Test/TestMsgpack.py
@@ -0,0 +1,88 @@
+import io
+import os
+
+import msgpack
+import pytest
+
+from Config import config
+from util import Msgpack
+from collections import OrderedDict
+
+
class TestMsgpack:
    """Round-trip, streaming and backward-compatibility tests for util.Msgpack."""

    # OrderedDict with sorted keys so the exact packed byte strings below are reproducible
    test_data = OrderedDict(
        sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items())
    )

    def testPacking(self):
        # use_bin_type=True (default) emits msgpack bin types; False packs bytes as raw
        assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
        assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'

    def testUnpacking(self):  # Renamed from testUnpackinkg (typo)
        assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data

    @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker])
    def testUnpacker(self, unpacker_class):
        unpacker = unpacker_class(raw=False)

        data = msgpack.packb(self.test_data, use_bin_type=True)
        data += msgpack.packb(self.test_data, use_bin_type=True)

        messages = []
        # Feed the stream one byte at a time to exercise partial-buffer handling
        for char in data:
            unpacker.feed(bytes([char]))
            for message in unpacker:
                messages.append(message)

        assert len(messages) == 2
        assert messages[0] == self.test_data
        assert messages[0] == messages[1]

    def testStreaming(self):
        bin_data = os.urandom(20)
        # FilePart limited to the first 30 bytes of users.json
        f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb")
        f.read_bytes = 30

        data = {"cmd": "response", "body": f, "bin": bin_data}

        out_buff = io.BytesIO()
        Msgpack.stream(data, out_buff.write)
        out_buff.seek(0)

        # Reference: the same 30 bytes read directly (was an unclosed open() before)
        with open("%s/users.json" % config.data_dir, "rb") as ref_f:
            data_packb = {
                "cmd": "response",
                "body": ref_f.read(30),
                "bin": bin_data
            }

        out_buff.seek(0)
        data_unpacked = Msgpack.unpack(out_buff.read())
        assert data_unpacked == data_packb
        assert data_unpacked["cmd"] == "response"
        assert type(data_unpacked["body"]) == bytes

    def testBackwardCompatibility(self):
        packed = {}
        packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False)
        packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True)
        for key, val in packed.items():
            unpacked = Msgpack.unpack(val)
            # These comparisons were silently discarded before (no assert) — the
            # type checks never actually ran; assert them now
            assert type(unpacked["utf8"]) == str
            assert type(unpacked["bin"]) == bytes

        # Packed with use_bin_type=False (pre-ZeroNet 0.7.0)
        unpacked = Msgpack.unpack(packed["py3"], decode=True)
        assert type(unpacked["utf8"]) == str
        assert type(unpacked["bin"]) == bytes
        assert len(unpacked["utf8"]) == 9
        assert len(unpacked["bin"]) == 10
        with pytest.raises(UnicodeDecodeError):  # Raw binary is not valid utf-8
            unpacked = Msgpack.unpack(packed["py3"], decode=False)

        # Packed with use_bin_type=True
        unpacked = Msgpack.unpack(packed["py3_bin"], decode=False)
        assert type(unpacked["utf8"]) == str
        assert type(unpacked["bin"]) == bytes
        assert len(unpacked["utf8"]) == 9
        assert len(unpacked["bin"]) == 10
+
diff --git a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py
new file mode 100644
index 00000000..6fc4f57d
--- /dev/null
+++ b/src/Test/TestNoparallel.py
@@ -0,0 +1,167 @@
+import time
+
+import gevent
+import pytest
+
+import util
+from util import ThreadPool
+
+
@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn'])
def queue_spawn(request):
    """Parametrized spawner: each test runs once with gevent greenlets and once
    with a ThreadPool-backed spawn.

    The pool is only created for the thread_pool.spawn variant — previously a
    10-worker ThreadPool was allocated even when the fixture returned
    gevent.spawn, starting threads that were never used.
    """
    if request.param == "gevent.spawn":
        return gevent.spawn
    thread_pool = ThreadPool.ThreadPool(10)
    return thread_pool.spawn
+
+
class ExampleClass(object):
    """Counter with Noparallel-wrapped methods used to probe locking behavior.

    Every counting step sleeps, so concurrent-call semantics can be observed
    through timing and through the final value of ``counted``.
    """

    def __init__(self):
        self.counted = 0  # total number of completed count steps

    @util.Noparallel()
    def countBlocking(self, num=5):
        # Blocking variant: 0.1s per step
        for step in range(1, num + 1):
            time.sleep(0.1)
            self.counted += 1
        return "counted:%s" % step

    @util.Noparallel(queue=True, ignore_class=True)
    def countQueue(self, num=5):
        # Queued variant shared across instances (ignore_class=True), 0.1s per step
        for step in range(1, num + 1):
            time.sleep(0.1)
            self.counted += 1
        return "counted:%s" % step

    @util.Noparallel(blocking=False)
    def countNoblocking(self, num=5):
        # Non-blocking variant: returns immediately, 0.01s per step
        for step in range(1, num + 1):
            time.sleep(0.01)
            self.counted += 1
        return "counted:%s" % step
+
+
class TestNoparallel:
    """Timing-sensitive tests for the util.Noparallel decorator.

    Each countBlocking/countQueue step sleeps 0.1s, so a full call of 5 steps
    takes ~0.5s; the duration assertions below rely on that.
    """

    def testBlocking(self, queue_spawn):
        obj1 = ExampleClass()
        obj2 = ExampleClass()

        # Parallel calls on the same bound method are merged into one run
        threads = [
            queue_spawn(obj1.countBlocking),
            queue_spawn(obj1.countBlocking),
            queue_spawn(obj1.countBlocking),
            queue_spawn(obj2.countBlocking)
        ]
        assert obj2.countBlocking() == "counted:5"  # Merged with the already-running obj2 call, but blocks until it finishes
        gevent.joinall(threads)
        assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"]
        obj2.countBlocking()  # Callable again once the previous run finished

        # obj1 ran once; obj2 ran once above plus the extra call just now
        assert obj1.counted == 5
        assert obj2.counted == 10

    def testNoblocking(self):
        obj1 = ExampleClass()

        thread1 = obj1.countNoblocking()
        thread2 = obj1.countNoblocking()  # Ignored; reports the same result as thread1

        assert obj1.counted == 0  # Returned immediately, counting happens in background
        time.sleep(0.1)
        assert thread1.value == "counted:5"
        assert thread2.value == "counted:5"
        assert obj1.counted == 5

        obj1.countNoblocking().join()  # Allow again and wait until finishes
        assert obj1.counted == 10

    def testQueue(self, queue_spawn):
        obj1 = ExampleClass()

        queue_spawn(obj1.countQueue, num=1)
        queue_spawn(obj1.countQueue, num=1)
        queue_spawn(obj1.countQueue, num=1)

        time.sleep(0.3)
        assert obj1.counted == 2  # No multi-queue supported: 3 requests collapse to 2 runs

        obj2 = ExampleClass()
        queue_spawn(obj2.countQueue, num=10)
        queue_spawn(obj2.countQueue, num=10)

        time.sleep(1.5)  # Call 1 finished, call 2 still working
        assert 10 < obj2.counted < 20

        # Queueing a third call after the collapse window adds one more full run
        queue_spawn(obj2.countQueue, num=10)
        time.sleep(2.0)

        assert obj2.counted == 30

    def testQueueOverload(self):
        obj1 = ExampleClass()

        # Flood with 1000 queued requests at once
        threads = []
        for i in range(1000):
            thread = gevent.spawn(obj1.countQueue, num=5)
            threads.append(thread)

        gevent.joinall(threads)
        assert obj1.counted == 5 * 2  # Only called twice (no multi-queue allowed)

    def testIgnoreClass(self, queue_spawn):
        obj1 = ExampleClass()
        obj2 = ExampleClass()

        # countQueue uses ignore_class=True, so obj1 and obj2 share one queue
        threads = [
            queue_spawn(obj1.countQueue),
            queue_spawn(obj1.countQueue),
            queue_spawn(obj1.countQueue),
            queue_spawn(obj2.countQueue),
            queue_spawn(obj2.countQueue)
        ]
        s = time.time()
        time.sleep(0.001)
        gevent.joinall(threads)

        # Queue limited to 2 calls (each call counts to 5 and takes ~0.5 sec)
        assert obj1.counted + obj2.counted == 10

        taken = time.time() - s
        assert 1.2 > taken >= 1.0  # 2 * 0.5s count = ~1s

    def testException(self, queue_spawn):
        class MyException(Exception):
            pass

        @util.Noparallel()
        def raiseException():
            raise MyException("Test error!")

        # The exception must propagate from a direct call...
        with pytest.raises(MyException) as err:
            raiseException()
        assert str(err.value) == "Test error!"

        # ...and from a spawned call retrieved via get()
        with pytest.raises(MyException) as err:
            queue_spawn(raiseException).get()
        assert str(err.value) == "Test error!"

    def testMultithreadMix(self, queue_spawn):
        # Greenlet and thread-pool callers must share the same Noparallel lock
        obj1 = ExampleClass()
        with ThreadPool.ThreadPool(10) as thread_pool:
            s = time.time()
            t1 = queue_spawn(obj1.countBlocking, 5)
            time.sleep(0.01)
            t2 = thread_pool.spawn(obj1.countBlocking, 5)
            time.sleep(0.01)
            t3 = thread_pool.spawn(obj1.countBlocking, 5)
            time.sleep(0.3)
            t4 = gevent.spawn(obj1.countBlocking, 5)
            threads = [t1, t2, t3, t4]
            for thread in threads:
                assert thread.get() == "counted:5"

            # All four spawns merged into a single ~0.5s counting run
            time_taken = time.time() - s
            assert obj1.counted == 5
            assert 0.5 < time_taken < 0.7
diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py
new file mode 100644
index 00000000..f57e046e
--- /dev/null
+++ b/src/Test/TestPeer.py
@@ -0,0 +1,159 @@
+import time
+import io
+
+import pytest
+
+from File import FileServer
+from File import FileRequest
+from Crypt import CryptHash
+from . import Spy
+
+
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestPeer:
    """Peer-level tests: ping, file download, hashfield sync and hash lookup."""

    def testPing(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        assert peer_file_server.ping() is not None

        # remove() must unregister the peer from the site
        assert peer_file_server in site_temp.peers.values()
        peer_file_server.remove()
        assert peer_file_server not in site_temp.peers.values()

        connection.close()
        client.stop()

    def testDownloadFile(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client
        connection = client.getConnection(file_server.ip, 1544)

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        # Testing streamFile
        buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
        assert b"sign" in buff.getvalue()

        # Testing getFile
        buff = peer_file_server.getFile(site_temp.address, "content.json")
        assert b"sign" in buff.getvalue()

        connection.close()
        client.stop()

    def testHashfield(self, site):
        sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]

        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Check if hashfield has any files
        assert site.content_manager.hashfield
        assert len(site.content_manager.hashfield) > 0

        # Check the hash id of an existing optional file
        assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield

        # Add new hash
        new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
        assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
        assert site.content_manager.hashfield.appendHash(new_hash)
        assert not site.content_manager.hashfield.appendHash(new_hash)  # Don't add second time
        assert site.content_manager.hashfield.getHashId(new_hash) in site.content_manager.hashfield

        # Remove new hash
        assert site.content_manager.hashfield.removeHash(new_hash)
        assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield

    def testHashfieldExchange(self, file_server, site, site_temp):
        server1 = file_server
        server1.sites[site.address] = site
        site.connection_server = server1

        server2 = FileServer(file_server.ip, 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2
        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Add file_server as peer to client
        server2_peer1 = site_temp.addPeer(file_server.ip, 1544)

        # Check if hashfield has any files
        assert len(site.content_manager.hashfield) > 0

        # Testing hashfield sync
        assert len(server2_peer1.hashfield) == 0
        assert server2_peer1.updateHashfield()  # Query hashfield from peer
        assert len(server2_peer1.hashfield) > 0

        # Test force push new hashfield; Spy counts the routed requests
        site_temp.content_manager.hashfield.appendHash("AABB")
        server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
        with Spy.Spy(FileRequest, "route") as requests:
            assert len(server1_peer2.hashfield) == 0
            server2_peer1.sendMyHashfield()
            assert len(server1_peer2.hashfield) == 1
            server2_peer1.sendMyHashfield()  # Hashfield not changed, should be ignored

            assert len(requests) == 1

            time.sleep(0.01)  # To make hashfield change date different

            site_temp.content_manager.hashfield.appendHash("AACC")
            server2_peer1.sendMyHashfield()  # Push hashfield

            assert len(server1_peer2.hashfield) == 2
            assert len(requests) == 2

            site_temp.content_manager.hashfield.appendHash("AADD")

            assert server1_peer2.updateHashfield(force=True)  # Request hashfield
            assert len(server1_peer2.hashfield) == 3
            assert len(requests) == 3

            assert not server2_peer1.sendMyHashfield()  # Not changed, should be ignored
            assert len(requests) == 3

        server2.stop()

    def testFindHash(self, file_server, site, site_temp):
        file_server.sites[site.address] = site
        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        assert peer_file_server.findHashIds([1234]) == {}

        # Add fake peers with the required hash ids
        fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
        fake_peer_1.hashfield.append(1234)
        fake_peer_2 = site.addPeer("1.2.3.5", 1545)
        fake_peer_2.hashfield.append(1234)
        fake_peer_2.hashfield.append(1235)
        fake_peer_3 = site.addPeer("1.2.3.6", 1546)
        fake_peer_3.hashfield.append(1235)
        fake_peer_3.hashfield.append(1236)

        # Each queried hash id maps to the (ip, port) pairs that hold it
        res = peer_file_server.findHashIds([1234, 1235])
        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)])
        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])

        # Test my address adding
        site.content_manager.hashfield.append(1234)

        res = peer_file_server.findHashIds([1234, 1235])
        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)])
        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
diff --git a/src/Test/TestRateLimit.py b/src/Test/TestRateLimit.py
new file mode 100644
index 00000000..fafa5f1a
--- /dev/null
+++ b/src/Test/TestRateLimit.py
@@ -0,0 +1,100 @@
+import time
+
+import gevent
+
+from util import RateLimit
+
+
def around(t, limit):
    """True when measured time *t* is within +/-0.05s of *limit*."""
    return (limit - 0.05) <= t <= (limit + 0.05)
+
+
class ExampleClass(object):
    """Records how many times count() ran and with which argument."""

    def __init__(self):
        self.counted = 0  # number of count() invocations so far
        self.last_called = None  # argument of the most recent invocation

    def count(self, back="counted"):
        """Increment the counter, remember *back*, and echo it to the caller."""
        self.counted += 1
        self.last_called = back
        return back
+
+
class TestRateLimit:
    """Timing-sensitive tests for util.RateLimit throttling.

    NOTE(review): assertions use around() with a +/-0.05s window, so heavy
    scheduler load can cause spurious failures.
    """

    def testCall(self):
        obj1 = ExampleClass()
        obj2 = ExampleClass()

        s = time.time()
        assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
        assert around(time.time() - s, 0.0)  # First call is allowed instantly
        assert obj1.counted == 1

        # Call again: delayed until the allowed_again interval has passed
        assert not RateLimit.isAllowed("counting", 0.1)
        assert RateLimit.isAllowed("something else", 0.1)  # Other keys unaffected
        assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
        assert around(time.time() - s, 0.1)  # Second call delayed to the interval edge
        assert obj1.counted == 2
        time.sleep(0.1)  # Wait the cooldown time

        # Call 3 times async: they serialize at 0.1s spacing
        s = time.time()
        assert obj2.counted == 0
        threads = [
            gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)),  # Instant
            gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)),  # 0.1s delay
            gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count))  # 0.2s delay
        ]
        gevent.joinall(threads)
        assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
        assert around(time.time() - s, 0.2)

        # Wait 0.1s cooldown
        assert not RateLimit.isAllowed("counting", 0.1)
        time.sleep(0.11)
        assert RateLimit.isAllowed("counting", 0.1)

        # No queue = instant again
        s = time.time()
        assert RateLimit.isAllowed("counting", 0.1)
        assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
        assert around(time.time() - s, 0.0)

        assert obj2.counted == 4

    def testCallAsync(self):
        obj1 = ExampleClass()
        obj2 = ExampleClass()  # NOTE(review): unused in this test

        s = time.time()
        RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
        assert obj1.counted == 1  # First call runs instantly
        assert around(time.time() - s, 0.0)

        # After that the calls are delayed; queued calls overwrite each other
        s = time.time()
        t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2")  # Dumped by the next call
        time.sleep(0.03)
        t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3")  # Dumped by the next call
        time.sleep(0.03)
        t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4")  # Will be called
        assert obj1.counted == 1  # Delay still in progress: Not called yet
        t3.join()
        assert t3.value == "call #4"
        assert around(time.time() - s, 0.1)

        # Only the last queued call actually ran
        assert obj1.counted == 2
        assert obj1.last_called == "call #4"

        # Just called, not allowed again until the interval passes
        assert not RateLimit.isAllowed("counting async", 0.1)
        s = time.time()
        t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
        assert obj1.counted == 3
        assert around(time.time() - s, 0.1)
        assert not RateLimit.isAllowed("counting async", 0.1)
        time.sleep(0.11)
        assert RateLimit.isAllowed("counting async", 0.1)
diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py
new file mode 100644
index 00000000..429bde50
--- /dev/null
+++ b/src/Test/TestSafeRe.py
@@ -0,0 +1,24 @@
+from util import SafeRe
+
+import pytest
+
+
class TestSafeRe:
    """SafeRe.match must run harmless patterns and reject ReDoS-prone ones."""

    # Input that would trigger catastrophic backtracking on unsafe patterns
    bait = "aaaaaaaaaaaaaaaaaaaaaaaa!"

    def testSafeMatch(self):
        site_pattern = "((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)"
        assert SafeRe.match(site_pattern, "js/ZeroTalk.coffee")
        assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json")

    @pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"])
    def testUnsafeMatch(self, pattern):
        # Nested/overlapping quantifiers must be refused before matching starts
        with pytest.raises(SafeRe.UnsafePatternError) as err:
            SafeRe.match(pattern, self.bait)
        assert "Potentially unsafe" in str(err.value)

    @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"])
    def testUnsafeRepetition(self, pattern):
        # Excessive group repetition is refused with a "More than" error
        with pytest.raises(SafeRe.UnsafePatternError) as err:
            SafeRe.match(pattern, self.bait)
        assert "More than" in str(err.value)
diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py
new file mode 100644
index 00000000..05bb2ed9
--- /dev/null
+++ b/src/Test/TestSite.py
@@ -0,0 +1,70 @@
+import shutil
+import os
+
+import pytest
+from Site import SiteManager
+
TEST_DATA_PATH = "src/Test/testdata"  # Bundled test site data; relative path, so tests presumably run from the repo root — TODO confirm
+
@pytest.mark.usefixtures("resetSettings")
class TestSite:
    def testClone(self, site):
        """Clone a site, then re-clone (update) it and verify user data survives."""
        assert site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"

        # Remove leftovers of a previous run
        if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"):
            shutil.rmtree(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
        assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json")

        # Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL
        # (the original comment named a different target address)
        new_site = site.clone(
            "159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL", "5JU2p5h3R7B1WrbaEdEDNZR7YHqRLGcjNcqwqVQzX2H4SuNe2ee", address_index=1
        )

        # Check if clone was successful
        assert new_site.address == "159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"
        assert new_site.storage.isFile("content.json")
        assert new_site.storage.isFile("index.html")
        assert new_site.storage.isFile("data/users/content.json")
        assert new_site.storage.isFile("data/zeroblog.db")
        assert new_site.storage.verifyFiles()["bad_files"] == []  # No bad files allowed
        assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog"

        # Optional files should be removed
        assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0

        # Test re-cloning (updating)

        # Changes in non-data files should be overwritten
        new_site.storage.write("index.html", b"this will be overwritten")
        assert new_site.storage.read("index.html") == b"this will be overwritten"

        # Changes in data file should be kept after re-cloning
        changed_contentjson = new_site.storage.loadJson("content.json")
        changed_contentjson["description"] = "Update Description Test"
        new_site.storage.writeJson("content.json", changed_contentjson)

        changed_data = new_site.storage.loadJson("data/data.json")
        changed_data["title"] = "UpdateTest"
        new_site.storage.writeJson("data/data.json", changed_data)

        # The update should be reflected to database
        assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "UpdateTest"

        # Re-clone the site
        site.log.debug("Re-cloning")
        site.clone("159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")

        assert new_site.storage.loadJson("data/data.json")["title"] == "UpdateTest"
        assert new_site.storage.loadJson("content.json")["description"] == "Update Description Test"
        # NOTE(review): read() returned bytes above, so comparing against a str
        # here is always unequal — likely intended b"this will be overwritten"
        assert new_site.storage.read("index.html") != "this will be overwritten"

        # Delete created files
        new_site.storage.deleteFiles()
        assert not os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")

        # Delete from site registry
        assert new_site.address in SiteManager.site_manager.sites
        SiteManager.site_manager.delete(new_site.address)
        assert new_site.address not in SiteManager.site_manager.sites
diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py
new file mode 100644
index 00000000..cd0a4c9f
--- /dev/null
+++ b/src/Test/TestSiteDownload.py
@@ -0,0 +1,562 @@
+import time
+
+import pytest
+import mock
+import gevent
+import gevent.event
+import os
+
+from Connection import ConnectionServer
+from Config import config
+from File import FileRequest
+from File import FileServer
+from Site.Site import Site
+from . import Spy
+
+
+@pytest.mark.usefixtures("resetTempSettings")
+@pytest.mark.usefixtures("resetSettings")
+class TestSiteDownload:
+ def testRename(self, file_server, site, site_temp):
+ assert site.storage.directory == config.data_dir + "/" + site.address
+ assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
+
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+ site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
+
+
+ site_temp.addPeer(file_server.ip, 1544)
+
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+
+ assert site_temp.storage.isFile("content.json")
+
+ # Rename non-optional file
+ os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))
+
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+
+ content = site.storage.loadJson("content.json")
+ assert "data/img/domain-new.png" in content["files"]
+ assert "data/img/domain.png" not in content["files"]
+ assert not site_temp.storage.isFile("data/img/domain-new.png")
+ assert site_temp.storage.isFile("data/img/domain.png")
+ settings_before = site_temp.settings
+
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.publish()
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
+ assert "streamFile" not in [req[1] for req in requests]
+
+ content = site_temp.storage.loadJson("content.json")
+ assert "data/img/domain-new.png" in content["files"]
+ assert "data/img/domain.png" not in content["files"]
+ assert site_temp.storage.isFile("data/img/domain-new.png")
+ assert not site_temp.storage.isFile("data/img/domain.png")
+
+ assert site_temp.settings["size"] == settings_before["size"]
+ assert site_temp.settings["size_optional"] == settings_before["size_optional"]
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
+
+ def testRenameOptional(self, file_server, site, site_temp):
+ assert site.storage.directory == config.data_dir + "/" + site.address
+ assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
+
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+ site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
+
+
+ site_temp.addPeer(file_server.ip, 1544)
+
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+
+ assert site_temp.settings["optional_downloaded"] == 0
+
+ site_temp.needFile("data/optional.txt")
+
+ assert site_temp.settings["optional_downloaded"] > 0
+ settings_before = site_temp.settings
+ hashfield_before = site_temp.content_manager.hashfield.tobytes()
+
+ # Rename optional file
+ os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))
+
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)
+
+ content = site.storage.loadJson("content.json")
+ assert "data/optional-new.txt" in content["files_optional"]
+ assert "data/optional.txt" not in content["files_optional"]
+ assert not site_temp.storage.isFile("data/optional-new.txt")
+ assert site_temp.storage.isFile("data/optional.txt")
+
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.publish()
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
+ assert "streamFile" not in [req[1] for req in requests]
+
+ content = site_temp.storage.loadJson("content.json")
+ assert "data/optional-new.txt" in content["files_optional"]
+ assert "data/optional.txt" not in content["files_optional"]
+ assert site_temp.storage.isFile("data/optional-new.txt")
+ assert not site_temp.storage.isFile("data/optional.txt")
+
+ assert site_temp.settings["size"] == settings_before["size"]
+ assert site_temp.settings["size_optional"] == settings_before["size_optional"]
+ assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
+ assert site_temp.content_manager.hashfield.tobytes() == hashfield_before
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
+
+
+ def testArchivedDownload(self, file_server, site, site_temp):
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+
+ # Download normally
+ site_temp.addPeer(file_server.ip, 1544)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
+
+ assert not bad_files
+ assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
+ assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
+ assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
+
+ # Add archived data
+ assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
+ assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
+
+ site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
+ site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+
+ date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
+ assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
+ assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
+ assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
+
+ # Push archived update
+ assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
+ site.publish()
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
+
+ # The archived content should disappear from remote client
+ assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
+ assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
+ assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
+ assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
+ assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
+
+ def testArchivedBeforeDownload(self, file_server, site, site_temp):
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+
+ # Download normally
+ site_temp.addPeer(file_server.ip, 1544)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
+
+ assert not bad_files
+ assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
+ assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
+ assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
+
+ # Add archived data
+ assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"]
+ assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
+
+ content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
+ site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
+ site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+
+ date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
+ assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
+ assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
+ assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
+
+ # Push archived update
+ assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
+ site.publish()
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
+
+ # The archived content should disappear from remote client
+ assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
+ assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
+ assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
+ assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
+ assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
+
+
+ # Test when connected peer has the optional file
+ def testOptionalDownload(self, file_server, site, site_temp):
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = ConnectionServer(file_server.ip, 1545)
+ site_temp.connection_server = client
+ site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
+
+ site_temp.addPeer(file_server.ip, 1544)
+
+ # Download site
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+
+ # Download optional data/optional.txt
+ site.storage.verifyFiles(quick_check=True) # Find what optional files we have
+ optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
+ assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
+ assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
+
+ assert not site_temp.storage.isFile("data/optional.txt")
+ assert site.storage.isFile("data/optional.txt")
+ site_temp.needFile("data/optional.txt")
+ assert site_temp.storage.isFile("data/optional.txt")
+
+ # Optional user file
+ assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ optional_file_info = site_temp.content_manager.getFileInfo(
+ "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
+ )
+ assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
+ assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
+
+ site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
+
+ # Test when the connected peer does not have the file, so we ask it if it knows someone who has it
+ def testFindOptional(self, file_server, site, site_temp):
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init full source server (has optional files)
+ site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
+ file_server_full = FileServer(file_server.ip, 1546)
+ site_full.connection_server = file_server_full
+
+ def listen():
+ ConnectionServer.start(file_server_full)
+ ConnectionServer.listen(file_server_full)
+
+ gevent.spawn(listen)
+ time.sleep(0.001) # Port opening
+ file_server_full.sites[site_full.address] = site_full # Add site
+ site_full.storage.verifyFiles(quick_check=True) # Check optional files
+ site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server
+ hashfield = site_full_peer.updateHashfield() # Update hashfield
+ assert len(site_full.content_manager.hashfield) == 8
+ assert hashfield
+ assert site_full.storage.isFile("data/optional.txt")
+ assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ assert len(site_full_peer.hashfield) == 8
+
+ # Remove hashes from source server
+ for hash in list(site.content_manager.hashfield):
+ site.content_manager.hashfield.remove(hash)
+
+ # Init client server
+ site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
+ site_temp.addPeer(file_server.ip, 1544) # Add source server
+
+ # Download normal files
+ site_temp.log.info("Start Downloading site")
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+
+ # Download optional data/optional.txt
+ optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
+ optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ assert not site_temp.storage.isFile("data/optional.txt")
+ assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+ assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server doesn't know it has the file
+ assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source server doesn't know it has the file
+ assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file
+ assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"]) # Source full peer on source server has the file
+ assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source full server knows it has the file
+ assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source full server knows it has the file
+
+ site_temp.log.info("Request optional files")
+ with Spy.Spy(FileRequest, "route") as requests:
+ # Request 2 files at the same time
+ threads = []
+ threads.append(site_temp.needFile("data/optional.txt", blocking=False))
+ threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
+ gevent.joinall(threads)
+
+ assert len([request for request in requests if request[1] == "findHashIds"]) == 1 # findHashIds should be called only once
+
+ assert site_temp.storage.isFile("data/optional.txt")
+ assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
+
+ assert site_temp.storage.deleteFiles()
+ file_server_full.stop()
+ [connection.close() for connection in file_server.connections]
+ site_full.content_manager.contents.db.close("FindOptional test end")
+
+ def testUpdate(self, file_server, site, site_temp):
+ assert site.storage.directory == config.data_dir + "/" + site.address
+ assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
+
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+
+ # Don't try to find peers from the net
+ site.announce = mock.MagicMock(return_value=True)
+ site_temp.announce = mock.MagicMock(return_value=True)
+
+ # Connect peers
+ site_temp.addPeer(file_server.ip, 1544)
+
+ # Download site from site to site_temp
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ assert len(site_temp.bad_files) == 1
+
+ # Update file
+ data_original = site.storage.open("data/data.json").read()
+ data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
+ assert data_original != data_new
+
+ site.storage.open("data/data.json", "wb").write(data_new)
+
+ assert site.storage.open("data/data.json").read() == data_new
+ assert site_temp.storage.open("data/data.json").read() == data_original
+
+ site.log.info("Publish new data.json without patch")
+ # Publish without patch
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+ site.publish()
+ time.sleep(0.1)
+ site.log.info("Downloading site")
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
+
+ assert site_temp.storage.open("data/data.json").read() == data_new
+
+ # Close connection to avoid update spam limit
+ list(site.peers.values())[0].remove()
+ site.addPeer(file_server.ip, 1545)
+ list(site_temp.peers.values())[0].ping() # Connect back
+ time.sleep(0.1)
+
+ # Update with patch
+ data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
+ assert data_original != data_new
+
+ site.storage.open("data/data.json-new", "wb").write(data_new)
+
+ assert site.storage.open("data/data.json-new").read() == data_new
+ assert site_temp.storage.open("data/data.json").read() != data_new
+
+ # Generate diff
+ diffs = site.content_manager.getDiffs("content.json")
+ assert not site.storage.isFile("data/data.json-new") # New data file removed
+ assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
+ assert "data/data.json" in diffs
+ assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
+
+ # Publish with patch
+ site.log.info("Publish new data.json with patch")
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+
+ event_done = gevent.event.AsyncResult()
+ site.publish(diffs=diffs)
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
+
+ assert site_temp.storage.open("data/data.json").read() == data_new
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
+
+ def testBigUpdate(self, file_server, site, site_temp):
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+
+ # Connect peers
+ site_temp.addPeer(file_server.ip, 1544)
+
+ # Download site from site to site_temp
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
+
+ # Update file
+ data_original = site.storage.open("data/data.json").read()
+ data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
+ assert data_original != data_new
+
+ site.storage.open("data/data.json-new", "wb").write(data_new)
+
+ assert site.storage.open("data/data.json-new").read() == data_new
+ assert site_temp.storage.open("data/data.json").read() != data_new
+
+ # Generate diff
+ diffs = site.content_manager.getDiffs("content.json")
+ assert not site.storage.isFile("data/data.json-new") # New data file removed
+ assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
+ assert "data/data.json" in diffs
+
+ content_json = site.storage.loadJson("content.json")
+ content_json["description"] = "BigZeroBlog" * 1024 * 10
+ site.storage.writeJson("content.json", content_json)
+ site.content_manager.loadContent("content.json", force=True)
+
+ # Publish with patch
+ site.log.info("Publish new data.json with patch")
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+ assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json
+ site.publish(diffs=diffs)
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
+ assert len(file_requests) == 1
+
+ assert site_temp.storage.open("data/data.json").read() == data_new
+ assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
+
+ # Test what happens if the content.json of the site is bigger than the site limit
+ def testHugeContentSiteUpdate(self, file_server, site, site_temp):
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+
+ # Connect peers
+ site_temp.addPeer(file_server.ip, 1544)
+
+ # Download site from site to site_temp
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+ site_temp.settings["size_limit"] = int(20 * 1024 *1024)
+ site_temp.saveSettings()
+
+ # Raise limit size to 20MB on site so it can be signed
+ site.settings["size_limit"] = int(20 * 1024 *1024)
+ site.saveSettings()
+
+ content_json = site.storage.loadJson("content.json")
+ content_json["description"] = "PartirUnJour" * 1024 * 1024
+ site.storage.writeJson("content.json", content_json)
+ changed, deleted = site.content_manager.loadContent("content.json", force=True)
+
+ # Make sure we have 2 different content.json files
+ assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
+
+ # Generate diff
+ diffs = site.content_manager.getDiffs("content.json")
+
+ # Publish with patch
+ site.log.info("Publish new content.json bigger than 10MB")
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+ assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # verify it is over 10MB
+ time.sleep(0.1)
+ site.publish(diffs=diffs)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+
+ assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
+ assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
+
+ def testUnicodeFilename(self, file_server, site, site_temp):
+ assert site.storage.directory == config.data_dir + "/" + site.address
+ assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
+
+ # Init source server
+ site.connection_server = file_server
+ file_server.sites[site.address] = site
+
+ # Init client server
+ client = FileServer(file_server.ip, 1545)
+ client.sites = {site_temp.address: site_temp}
+ site_temp.connection_server = client
+ site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
+
+ site_temp.addPeer(file_server.ip, 1544)
+
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+
+ site.storage.write("data/img/árvíztűrő.png", b"test")
+
+ site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
+
+ content = site.storage.loadJson("content.json")
+ assert "data/img/árvíztűrő.png" in content["files"]
+ assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
+ settings_before = site_temp.settings
+
+ with Spy.Spy(FileRequest, "route") as requests:
+ site.publish()
+ time.sleep(0.1)
+ assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
+ assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
+
+ content = site_temp.storage.loadJson("content.json")
+ assert "data/img/árvíztűrő.png" in content["files"]
+ assert site_temp.storage.isFile("data/img/árvíztűrő.png")
+
+ assert site_temp.settings["size"] == settings_before["size"]
+ assert site_temp.settings["size_optional"] == settings_before["size_optional"]
+
+ assert site_temp.storage.deleteFiles()
+ [connection.close() for connection in file_server.connections]
diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py
new file mode 100644
index 00000000..f11262bf
--- /dev/null
+++ b/src/Test/TestSiteStorage.py
@@ -0,0 +1,25 @@
+import pytest
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestSiteStorage:
+ def testWalk(self, site):
+ # Rootdir
+ walk_root = list(site.storage.walk(""))
+ assert "content.json" in walk_root
+ assert "css/all.css" in walk_root
+
+ # Subdir
+ assert list(site.storage.walk("data-default")) == ["data.json", "users/content-default.json"]
+
+ def testList(self, site):
+ # Rootdir
+ list_root = list(site.storage.list(""))
+ assert "content.json" in list_root
+ assert "css/all.css" not in list_root
+
+ # Subdir
+ assert set(site.storage.list("data-default")) == set(["data.json", "users"])
+
+ def testDbRebuild(self, site):
+ assert site.storage.rebuildDb()
diff --git a/src/Test/TestThreadPool.py b/src/Test/TestThreadPool.py
new file mode 100644
index 00000000..5e95005e
--- /dev/null
+++ b/src/Test/TestThreadPool.py
@@ -0,0 +1,163 @@
+import time
+import threading
+
+import gevent
+import pytest
+
+from util import ThreadPool
+
+
+class TestThreadPool:
+ def testExecutionOrder(self):
+ with ThreadPool.ThreadPool(4) as pool:
+ events = []
+
+ @pool.wrap
+ def blocker():
+ events.append("S")
+ out = 0
+ for i in range(10000000):
+ if i == 3000000:
+ events.append("M")
+ out += 1
+ events.append("D")
+ return out
+
+ threads = []
+ for i in range(3):
+ threads.append(gevent.spawn(blocker))
+ gevent.joinall(threads)
+
+ assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3
+
+ res = blocker()
+ assert res == 10000000
+
+ def testLockBlockingSameThread(self):
+ lock = ThreadPool.Lock()
+
+ s = time.time()
+
+ def unlocker():
+ time.sleep(1)
+ lock.release()
+
+ gevent.spawn(unlocker)
+ lock.acquire(True)
+ lock.acquire(True, timeout=2)
+
+ unlock_taken = time.time() - s
+
+ assert 1.0 < unlock_taken < 1.5
+
+ def testLockBlockingDifferentThread(self):
+ lock = ThreadPool.Lock()
+
+ def locker():
+ lock.acquire(True)
+ time.sleep(0.5)
+ lock.release()
+
+ with ThreadPool.ThreadPool(10) as pool:
+ threads = [
+ pool.spawn(locker),
+ pool.spawn(locker),
+ gevent.spawn(locker),
+ pool.spawn(locker)
+ ]
+ time.sleep(0.1)
+
+ s = time.time()
+
+ lock.acquire(True, 5.0)
+
+ unlock_taken = time.time() - s
+
+ assert 1.8 < unlock_taken < 2.2
+
+ gevent.joinall(threads)
+
+ def testMainLoopCallerThreadId(self):
+ main_thread_id = threading.current_thread().ident
+ with ThreadPool.ThreadPool(5) as pool:
+ def getThreadId(*args, **kwargs):
+ return threading.current_thread().ident
+
+ t = pool.spawn(getThreadId)
+ assert t.get() != main_thread_id
+
+ t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId))
+ assert t.get() == main_thread_id
+
+ def testMainLoopCallerGeventSpawn(self):
+ main_thread_id = threading.current_thread().ident
+ with ThreadPool.ThreadPool(5) as pool:
+ def waiter():
+ time.sleep(1)
+ return threading.current_thread().ident
+
+ def geventSpawner():
+ event = ThreadPool.main_loop.call(gevent.spawn, waiter)
+
+ with pytest.raises(Exception) as greenlet_err:
+ event.get()
+ assert str(greenlet_err.value) == "cannot switch to a different thread"
+
+ waiter_thread_id = ThreadPool.main_loop.call(event.get)
+ return waiter_thread_id
+
+ s = time.time()
+ waiter_thread_id = pool.apply(geventSpawner)
+ assert main_thread_id == waiter_thread_id
+ time_taken = time.time() - s
+ assert 0.9 < time_taken < 1.2
+
+ def testEvent(self):
+ with ThreadPool.ThreadPool(5) as pool:
+ event = ThreadPool.Event()
+
+ def setter():
+ time.sleep(1)
+ event.set("done!")
+
+ def getter():
+ return event.get()
+
+ pool.spawn(setter)
+ t_gevent = gevent.spawn(getter)
+ t_pool = pool.spawn(getter)
+ s = time.time()
+ assert event.get() == "done!"
+ time_taken = time.time() - s
+ gevent.joinall([t_gevent, t_pool])
+
+ assert t_gevent.get() == "done!"
+ assert t_pool.get() == "done!"
+
+ assert 0.9 < time_taken < 1.2
+
+ with pytest.raises(Exception) as err:
+ event.set("another result")
+
+ assert "Event already has value" in str(err.value)
+
+ def testMemoryLeak(self):
+ import gc
+ thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))]
+
+ def worker():
+ time.sleep(0.1)
+ return "ok"
+
+ def poolTest():
+ with ThreadPool.ThreadPool(5) as pool:
+ for i in range(20):
+ pool.spawn(worker)
+
+ for i in range(5):
+ poolTest()
+ new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before]
+ #print("New objs:", new_thread_objs, "run:", num_run)
+
+ # Make sure no threadpool object left behind
+ assert not new_thread_objs
diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py
new file mode 100644
index 00000000..0252d73a
--- /dev/null
+++ b/src/Test/TestTor.py
@@ -0,0 +1,153 @@
+import time
+
+import pytest
+import mock
+
+from File import FileServer
+from Crypt import CryptRsa
+from Config import config
+
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestTor:
    # Integration tests against a live tor_manager fixture (requires a
    # reachable Tor control port); network-heavy cases are marked slow.

    def testDownload(self, tor_manager):
        # Give the controller up to ~15s to connect before checking
        for retry in range(15):
            time.sleep(1)
            if tor_manager.enabled and tor_manager.conn:
                break
        assert tor_manager.enabled

    def testManagerConnection(self, tor_manager):
        # GETINFO over the control port should report the Tor version
        assert "250-version" in tor_manager.request("GETINFO version")

    def testAddOnion(self, tor_manager):
        # Add: a new hidden service address with its key stored locally
        address = tor_manager.addOnion()
        assert address
        assert address in tor_manager.privatekeys

        # Delete: key must be removed again
        assert tor_manager.delOnion(address)
        assert address not in tor_manager.privatekeys

    def testSignOnion(self, tor_manager):
        address = tor_manager.addOnion()

        # Sign with the onion's RSA private key
        sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address))
        assert len(sign) == 128

        # Verify the signature with the derived public key
        publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address))
        assert len(publickey) == 140
        assert CryptRsa.verify(b"hello", publickey, sign)
        assert not CryptRsa.verify(b"not hello", publickey, sign)

        # Public key must map back to the onion address
        assert CryptRsa.publickeyToOnion(publickey) == address

        # Delete
        tor_manager.delOnion(address)

    @pytest.mark.slow
    def testConnection(self, tor_manager, file_server, site, site_temp):
        file_server.tor_manager.start_onions = True
        address = file_server.tor_manager.getOnion(site.address)
        assert address
        print("Connecting to", address)
        for retry in range(5):  # Wait for hidden service creation
            time.sleep(10)
            try:
                connection = file_server.getConnection(address + ".onion", 1544)
                if connection:
                    break
            except Exception as err:
                continue
        # NOTE(review): if every retry fails, "connection" is unbound here and
        # the next line raises NameError instead of a clean assertion failure
        assert connection.handshake
        assert not connection.handshake["peer_id"]  # No peer_id for Tor connections

        # Return the same connection without site specified
        assert file_server.getConnection(address + ".onion", 1544) == connection
        # No reuse for different site
        assert file_server.getConnection(address + ".onion", 1544, site=site) != connection
        assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site)
        site_temp.address = "1OTHERSITE"
        assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp)

        # Only allow to query from the locked site
        file_server.sites[site.address] = site
        connection_locked = file_server.getConnection(address + ".onion", 1544, site=site)
        assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
        assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site"

    def testPex(self, file_server, site, site_temp):
        # Register site to currently running fileserver
        site.connection_server = file_server
        file_server.sites[site.address] = site
        # Create a new file server to emulate new peer connecting to our peer
        file_server_temp = FileServer(file_server.ip, 1545)
        site_temp.connection_server = file_server_temp
        file_server_temp.sites[site_temp.address] = site_temp

        # We will request peers from this
        peer_source = site_temp.addPeer(file_server.ip, 1544)

        # Get ip4 peers from source site
        site.addPeer("1.2.3.4", 1555)  # Add peer to source site
        assert peer_source.pex(need_num=10) == 1
        assert len(site_temp.peers) == 2
        assert "1.2.3.4:1555" in site_temp.peers

        # Get onion peers from source site
        site.addPeer("bka4ht2bzxchy44r.onion", 1555)
        assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers

        # Don't add onion peers if not supported
        assert "onion" not in file_server_temp.supported_ip_types
        assert peer_source.pex(need_num=10) == 0

        file_server_temp.supported_ip_types.append("onion")
        assert peer_source.pex(need_num=10) == 1

        assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers

    def testFindHash(self, tor_manager, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection
        file_server.sites[site.address] = site
        file_server.tor_manager = tor_manager

        client = FileServer(file_server.ip, 1545)
        client.sites = {site_temp.address: site_temp}
        site_temp.connection_server = client

        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer(file_server.ip, 1544)

        assert peer_file_server.findHashIds([1234]) == {}

        # Add fake peers with the required hashes
        fake_peer_1 = site.addPeer("bka4ht2bzxchy44r.onion", 1544)
        fake_peer_1.hashfield.append(1234)
        fake_peer_2 = site.addPeer("1.2.3.5", 1545)
        fake_peer_2.hashfield.append(1234)
        fake_peer_2.hashfield.append(1235)
        fake_peer_3 = site.addPeer("1.2.3.6", 1546)
        fake_peer_3.hashfield.append(1235)
        fake_peer_3.hashfield.append(1236)

        res = peer_file_server.findHashIds([1234, 1235])

        assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
        assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]

        # Test my address adding
        site.content_manager.hashfield.append(1234)

        res = peer_file_server.findHashIds([1234, 1235])
        assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
        assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]

    def testSiteOnion(self, tor_manager):
        # With tor=always each site gets its own, deterministic onion address
        with mock.patch.object(config, "tor", "always"):
            assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2")
            assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1")
diff --git a/src/Test/TestTranslate.py b/src/Test/TestTranslate.py
new file mode 100644
index 00000000..348a65a6
--- /dev/null
+++ b/src/Test/TestTranslate.py
@@ -0,0 +1,61 @@
+from Translate import Translate
+
class TestTranslate:
    """Tests for the Translate helper: strict source-code translation and
    HTML-escaping of interpolated user data."""

    def testTranslateStrict(self):
        # Strict mode: only quoted _("...") calls get replaced
        translate = Translate()
        data = """
            translated = _("original")
            not_translated = "original"
        """
        data_translated = translate.translateData(data, {"_(original)": "translated"})
        assert 'translated = _("translated")' in data_translated
        assert 'not_translated = "original"' in data_translated

    def testTranslateStrictNamed(self):
        # Named variant: _("original", "name") is keyed as "_(original, name)"
        translate = Translate()
        data = """
            translated = _("original", "original named")
            translated_other = _("original", "original other named")
            not_translated = "original"
        """
        data_translated = translate.translateData(data, {"_(original, original named)": "translated"})
        assert 'translated = _("translated")' in data_translated
        assert 'not_translated = "original"' in data_translated

    def testTranslateUtf8(self):
        translate = Translate()
        data = """
            greeting = "Hi again árvztűrőtökörfúrógép!"
        """
        data_translated = translate.translateData(data, {"Hi again árvztűrőtökörfúrógép!": "Üdv újra árvztűrőtökörfúrógép!"})
        assert data_translated == """
            greeting = "Üdv újra árvztűrőtökörfúrógép!"
        """

    def testTranslateEscape(self):
        # Interpolated user data must come back HTML-escaped; the payload
        # deliberately contains a <script> injection attempt.
        _ = Translate()
        _["Hello"] = "Szia"

        # Simple escaping
        data = "{_[Hello]} {username}!"
        username = "Hacker<script>alert('boom')</script>"
        data_translated = _(data, {"username": username})
        assert 'Szia' in data_translated
        assert '<' not in data_translated
        assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"

        # Escaping dicts
        user = {"username": "Hacker<script>alert('boom')</script>"}
        data = "{_[Hello]} {user[username]}!"
        data_translated = _(data, {"user": user})
        assert 'Szia' in data_translated
        assert '<' not in data_translated
        assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"

        # Escaping lists
        users = [{"username": "Hacker<script>alert('boom')</script>"}]
        data = "{_[Hello]} {users[0][username]}!"
        data_translated = _(data, {"users": users})
        assert 'Szia' in data_translated
        assert '<' not in data_translated
        assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
diff --git a/src/Test/TestUiWebsocket.py b/src/Test/TestUiWebsocket.py
new file mode 100644
index 00000000..d2d23d03
--- /dev/null
+++ b/src/Test/TestUiWebsocket.py
@@ -0,0 +1,11 @@
+import sys
+import pytest
+
@pytest.mark.usefixtures("resetSettings")
class TestUiWebsocket:
    """Smoke tests for the UI websocket action dispatcher."""

    def testPermission(self, ui_websocket):
        # "ping" needs no special permission
        assert ui_websocket.testAction("ping") == "pong"

        # "certList" is an admin action; the test socket has no such permission
        response = ui_websocket.testAction("certList")
        assert "You don't have permission" in response["error"]
diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py
new file mode 100644
index 00000000..f17c77bd
--- /dev/null
+++ b/src/Test/TestUpnpPunch.py
@@ -0,0 +1,274 @@
+import socket
+from urllib.parse import urlparse
+
+import pytest
+import mock
+
+from util import UpnpPunch as upnp
+
+
@pytest.fixture
def mock_socket():
    """Socket stub for the SSDP tests: recv() yields a canned payload and
    the methods exercised by the code under test are pre-created mocks."""
    mock_socket = mock.MagicMock()
    mock_socket.recv = mock.MagicMock(return_value=b'Hello')
    mock_socket.bind = mock.MagicMock()
    # Sockets expose sendto(); the previous "send_to" spelling configured an
    # attribute nothing ever calls, leaving sendto auto-created instead.
    mock_socket.sendto = mock.MagicMock()

    return mock_socket
+
+
@pytest.fixture
def url_obj():
    """Parsed IGD control-point URL shared by the UPnP tests."""
    control_point = "http://192.168.1.1/ctrlPoint.xml"
    return urlparse(control_point)
+
+
@pytest.fixture(params=['WANPPPConnection', 'WANIPConnection'])
def igd_profile(request):
    """Minimal IGD device profile XML, parametrized over both WAN schemas.

    The tag markup is essential: test_parse_igd_profile_no_ctrlurl mutates
    the 'controlURL' tag name and _parse_igd_profile extracts values by tag,
    so a tag-less blob (as before) could never satisfy those tests.
    """
    return """
        <serviceType>urn:schemas-upnp-org:service:{}:1</serviceType>
        <serviceId>urn:upnp-org:serviceId:wanpppc:pppoa</serviceId>
        <controlURL>/upnp/control/wanpppcpppoa</controlURL>
        <eventSubURL>/upnp/event/wanpppcpppoa</eventSubURL>
        <SCPDURL>/WANPPPConnection.xml</SCPDURL>
""".format(request.param)
+
+
@pytest.fixture
def httplib_response():
    """Return a factory that builds minimal httplib-style response stubs
    (only .status and .read() are needed by _parse_for_errors)."""
    class FakeResponse(object):
        def __init__(self, status=200, body='OK'):
            self.status = status
            self.body = body

        def read(self):
            # Mimic http.client: the body comes back from read()
            return self.body

    return FakeResponse
+
+
class TestUpnpPunch(object):
    """Unit tests for util.UpnpPunch: SSDP discovery, IGD profile parsing,
    SOAP message construction and the open/close-port orchestration.

    All network interaction is mocked; no real UPnP gateway is required.
    """

    def test_perform_m_search(self, mock_socket):
        local_ip = '127.0.0.1'

        with mock.patch('util.UpnpPunch.socket.socket',
                        return_value=mock_socket):
            result = upnp.perform_m_search(local_ip)
            assert result == 'Hello'
            # The M-SEARCH socket must be bound to the local interface...
            assert local_ip == mock_socket.bind.call_args_list[0][0][0][0]
            # ...and the datagram sent to the SSDP multicast address/port
            assert ('239.255.255.250',
                    1900) == mock_socket.sendto.call_args_list[0][0][1]

    def test_perform_m_search_socket_error(self, mock_socket):
        mock_socket.recv.side_effect = socket.error('Timeout error')

        with mock.patch('util.UpnpPunch.socket.socket',
                        return_value=mock_socket):
            with pytest.raises(upnp.UpnpError):
                upnp.perform_m_search('127.0.0.1')

    def test_retrieve_location_from_ssdp(self, url_obj):
        ctrl_location = url_obj.geturl()
        parsed_location = urlparse(ctrl_location)
        rsp = ('auth: gibberish\r\nlocation: {0}\r\n'
               'Content-Type: text/html\r\n\r\n').format(ctrl_location)
        result = upnp._retrieve_location_from_ssdp(rsp)
        assert result == parsed_location

    def test_retrieve_location_from_ssdp_no_header(self):
        # Missing "location:" header must raise an IGDError
        rsp = 'auth: gibberish\r\nContent-Type: application/json\r\n\r\n'
        with pytest.raises(upnp.IGDError):
            upnp._retrieve_location_from_ssdp(rsp)

    def test_retrieve_igd_profile(self, url_obj):
        with mock.patch('urllib.request.urlopen') as mock_urlopen:
            upnp._retrieve_igd_profile(url_obj)
            mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)

    def test_retrieve_igd_profile_timeout(self, url_obj):
        with mock.patch('urllib.request.urlopen') as mock_urlopen:
            mock_urlopen.side_effect = socket.error('Timeout error')
            with pytest.raises(upnp.IGDError):
                upnp._retrieve_igd_profile(url_obj)

    def test_parse_igd_profile_service_type(self, igd_profile):
        control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
        assert control_path == '/upnp/control/wanpppcpppoa'
        assert upnp_schema in ('WANPPPConnection', 'WANIPConnection',)

    def test_parse_igd_profile_no_ctrlurl(self, igd_profile):
        igd_profile = igd_profile.replace('controlURL', 'nope')
        with pytest.raises(upnp.IGDError):
            control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)

    def test_parse_igd_profile_no_schema(self, igd_profile):
        igd_profile = igd_profile.replace('Connection', 'nope')
        with pytest.raises(upnp.IGDError):
            control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)

    def test_create_open_message_parsable(self):
        from xml.parsers.expat import ExpatError
        msg, _ = upnp._create_open_message('127.0.0.1', 8888)
        try:
            upnp.parseString(msg)
        except ExpatError as e:
            pytest.fail('Incorrect XML message: {}'.format(e))

    def test_create_open_message_contains_right_stuff(self):
        settings = {'description': 'test desc',
                    'protocol': 'test proto',
                    'upnp_schema': 'test schema'}
        msg, fn_name = upnp._create_open_message('127.0.0.1', 8888, **settings)
        assert fn_name == 'AddPortMapping'
        assert '127.0.0.1' in msg
        assert '8888' in msg
        assert settings['description'] in msg
        assert settings['protocol'] in msg
        assert settings['upnp_schema'] in msg

    def test_parse_for_errors_bad_rsp(self, httplib_response):
        rsp = httplib_response(status=500)
        with pytest.raises(upnp.IGDError) as err:
            upnp._parse_for_errors(rsp)
        assert 'Unable to parse' in str(err.value)

    def test_parse_for_errors_error(self, httplib_response):
        # A well-formed SOAP fault body: the XML tags are required so that
        # _parse_for_errors can locate the errorCode element.
        soap_error = ('<document>'
                      '<errorCode>500</errorCode>'
                      '<errorDescription>Bad request</errorDescription>'
                      '</document>')
        rsp = httplib_response(status=500, body=soap_error)
        with pytest.raises(upnp.IGDError) as err:
            upnp._parse_for_errors(rsp)
        assert 'SOAP request error' in str(err.value)

    def test_parse_for_errors_good_rsp(self, httplib_response):
        rsp = httplib_response(status=200)
        assert rsp == upnp._parse_for_errors(rsp)

    def test_send_requests_success(self):
        with mock.patch(
                'util.UpnpPunch._send_soap_request') as mock_send_request:
            mock_send_request.return_value = mock.MagicMock(status=200)
            upnp._send_requests(['msg'], None, None, None)

        assert mock_send_request.called

    def test_send_requests_failed(self):
        with mock.patch(
                'util.UpnpPunch._send_soap_request') as mock_send_request:
            mock_send_request.return_value = mock.MagicMock(status=500)
            with pytest.raises(upnp.UpnpError):
                upnp._send_requests(['msg'], None, None, None)

        assert mock_send_request.called

    def test_collect_idg_data(self):
        # TODO: exercise _collect_idg_data once its network calls are mockable
        pass

    @mock.patch('util.UpnpPunch._get_local_ips')
    @mock.patch('util.UpnpPunch._collect_idg_data')
    @mock.patch('util.UpnpPunch._send_requests')
    def test_ask_to_open_port_success(self, mock_send_requests,
                                      mock_collect_idg, mock_local_ips):
        mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
        mock_local_ips.return_value = ['192.168.0.12']

        result = upnp.ask_to_open_port(retries=5)

        # First positional arg of _send_requests is the list of (msg, fn_name)
        soap_msg = mock_send_requests.call_args[0][0][0][0]

        assert result is True

        assert mock_collect_idg.called
        assert '192.168.0.12' in soap_msg
        assert '15441' in soap_msg  # Default port is used when none is given
        assert 'schema-yo' in soap_msg

    @mock.patch('util.UpnpPunch._get_local_ips')
    @mock.patch('util.UpnpPunch._collect_idg_data')
    @mock.patch('util.UpnpPunch._send_requests')
    def test_ask_to_open_port_failure(self, mock_send_requests,
                                      mock_collect_idg, mock_local_ips):
        mock_local_ips.return_value = ['192.168.0.12']
        mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
        mock_send_requests.side_effect = upnp.UpnpError()

        with pytest.raises(upnp.UpnpError):
            upnp.ask_to_open_port()

    @mock.patch('util.UpnpPunch._collect_idg_data')
    @mock.patch('util.UpnpPunch._send_requests')
    def test_orchestrate_soap_request(self, mock_send_requests,
                                      mock_collect_idg):
        soap_mock = mock.MagicMock()
        args = ['127.0.0.1', 31337, soap_mock, 'upnp-test', {'upnp_schema':
                                                             'schema-yo'}]
        mock_collect_idg.return_value = args[-1]

        upnp._orchestrate_soap_request(*args[:-1])

        assert mock_collect_idg.called
        # The message-factory receives ip, port, desc, protocol and schema
        soap_mock.assert_called_with(
            *args[:2] + ['upnp-test', 'UDP', 'schema-yo'])
        assert mock_send_requests.called

    @mock.patch('util.UpnpPunch._collect_idg_data')
    @mock.patch('util.UpnpPunch._send_requests')
    def test_orchestrate_soap_request_without_desc(self, mock_send_requests,
                                                   mock_collect_idg):
        soap_mock = mock.MagicMock()
        args = ['127.0.0.1', 31337, soap_mock, {'upnp_schema': 'schema-yo'}]
        mock_collect_idg.return_value = args[-1]

        upnp._orchestrate_soap_request(*args[:-1])

        assert mock_collect_idg.called
        soap_mock.assert_called_with(*args[:2] + [None, 'UDP', 'schema-yo'])
        assert mock_send_requests.called

    def test_create_close_message_parsable(self):
        from xml.parsers.expat import ExpatError
        msg, _ = upnp._create_close_message('127.0.0.1', 8888)
        try:
            upnp.parseString(msg)
        except ExpatError as e:
            pytest.fail('Incorrect XML message: {}'.format(e))

    def test_create_close_message_contains_right_stuff(self):
        settings = {'protocol': 'test proto',
                    'upnp_schema': 'test schema'}
        msg, fn_name = upnp._create_close_message('127.0.0.1', 8888,
                                                  **settings)
        assert fn_name == 'DeletePortMapping'
        assert '8888' in msg
        assert settings['protocol'] in msg
        assert settings['upnp_schema'] in msg

    @mock.patch('util.UpnpPunch._get_local_ips')
    @mock.patch('util.UpnpPunch._orchestrate_soap_request')
    def test_communicate_with_igd_success(self, mock_orchestrate,
                                          mock_get_local_ips):
        mock_get_local_ips.return_value = ['192.168.0.12']
        upnp._communicate_with_igd()
        assert mock_get_local_ips.called
        assert mock_orchestrate.called

    @mock.patch('util.UpnpPunch._get_local_ips')
    @mock.patch('util.UpnpPunch._orchestrate_soap_request')
    def test_communicate_with_igd_succeed_despite_single_failure(
            self, mock_orchestrate, mock_get_local_ips):
        mock_get_local_ips.return_value = ['192.168.0.12']
        # First attempt raises, the retry succeeds -> overall success
        mock_orchestrate.side_effect = [upnp.UpnpError, None]
        upnp._communicate_with_igd(retries=2)
        assert mock_get_local_ips.called
        assert mock_orchestrate.called

    @mock.patch('util.UpnpPunch._get_local_ips')
    @mock.patch('util.UpnpPunch._orchestrate_soap_request')
    def test_communicate_with_igd_total_failure(self, mock_orchestrate,
                                                mock_get_local_ips):
        mock_get_local_ips.return_value = ['192.168.0.12']
        # Every attempt fails -> the error must propagate to the caller
        mock_orchestrate.side_effect = [upnp.UpnpError, upnp.IGDError]
        with pytest.raises(upnp.UpnpError):
            upnp._communicate_with_igd(retries=2)
        assert mock_get_local_ips.called
        assert mock_orchestrate.called
diff --git a/src/Test/TestUser.py b/src/Test/TestUser.py
new file mode 100644
index 00000000..e5ec5c8c
--- /dev/null
+++ b/src/Test/TestUser.py
@@ -0,0 +1,50 @@
+import pytest
+
+from Crypt import CryptBitcoin
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestUser:
+ def testAddress(self, user):
+ assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
+ address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
+ assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index
+
+ # Re-generate privatekey based on address_index
+ def testNewSite(self, user):
+ address, address_index, site_data = user.getNewSiteData() # Create a new random site
+ assert CryptBitcoin.hdPrivatekey(user.master_seed, address_index) == site_data["privatekey"]
+
+ user.sites = {} # Reset user data
+
+ # Site address and auth address is different
+ assert user.getSiteData(address)["auth_address"] != address
+ # Re-generate auth_privatekey for site
+ assert user.getSiteData(address)["auth_privatekey"] == site_data["auth_privatekey"]
+
+ def testAuthAddress(self, user):
+ # Auth address without Cert
+ auth_address = user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
+ assert auth_address == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
+ auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
+ assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == auth_address
+
+ def testCert(self, user):
+ cert_auth_address = user.getAuthAddress("1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz") # Add site to user's registry
+ # Add cert
+ user.addCert(cert_auth_address, "zeroid.bit", "faketype", "fakeuser", "fakesign")
+ user.setCert("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr", "zeroid.bit")
+
+ # By using certificate the auth address should be same as the certificate provider
+ assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
+ auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
+ assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == cert_auth_address
+
+ # Test delete site data
+ assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" in user.sites
+ user.deleteSiteData("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
+ assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" not in user.sites
+
+ # Re-create add site should generate normal, unique auth_address
+ assert not user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
+ assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py
new file mode 100644
index 00000000..2ce66c98
--- /dev/null
+++ b/src/Test/TestWeb.py
@@ -0,0 +1,105 @@
+import urllib.request
+
+import pytest
+
+try:
+ from selenium.webdriver.support.ui import WebDriverWait
+ from selenium.webdriver.support.expected_conditions import staleness_of, title_is
+ from selenium.common.exceptions import NoSuchElementException
+except:
+ pass
+
+
class WaitForPageLoad(object):
    # Context manager: remembers the current <html> element on enter and, on
    # exit, waits (max 10s) until it goes stale, i.e. a new page has loaded.
    def __init__(self, browser):
        self.browser = browser

    def __enter__(self):
        self.old_page = self.browser.find_element_by_tag_name('html')

    def __exit__(self, *args):
        WebDriverWait(self.browser, 10).until(staleness_of(self.old_page))
+
+
def getContextUrl(browser):
    """Return the browser's current location by evaluating JS in the page."""
    location = browser.execute_script("return window.location.toString()")
    return location
+
+
def getUrl(url):
    """Fetch *url* and return its body as text, failing on server errors.

    urlopen().read() returns bytes in Python 3; it must be decoded before the
    "server error" substring check (and before callers do e.g.
    `"Not Found" in getUrl(...)`) — a str-in-bytes test raises TypeError.
    """
    content = urllib.request.urlopen(url).read().decode("utf8")
    assert "server error" not in content.lower(), "Got a server error! " + repr(url)
    return content
+
+@pytest.mark.usefixtures("resetSettings")
+@pytest.mark.webtest
+class TestWeb:
    def testFileSecurity(self, site_url):
        # Path traversal and private-file access through the /media handler
        assert "Not Found" in getUrl("%s/media/sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/media/../config.py" % site_url)
        assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)

        # Same traversal attempts through the /raw handler
        assert "Not Found" in getUrl("%s/raw/sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url)
        assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)

        # Traversal directly from a site root
        assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
        assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)

        # Direct requests for private data files must be rejected
        assert "Forbidden" in getUrl("%s/content.db" % site_url)
        assert "Forbidden" in getUrl("%s/./users.json" % site_url)
        assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url)
        assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url)
+
+ def testLinkSecurity(self, browser, site_url):
+ browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url)
+ WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet"))
+ assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url
+
+ # Switch to inner frame
+ browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
+ assert "wrapper_nonce" in getContextUrl(browser)
+ assert browser.find_element_by_id("script_output").text == "Result: Works"
+ browser.switch_to.default_content()
+
+ # Clicking on links without target
+ browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
+ with WaitForPageLoad(browser):
+ browser.find_element_by_id("link_to_current").click()
+ assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content
+ assert "Forbidden" not in browser.page_source
+ # Check if we have frame inside frame
+ browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
+ with pytest.raises(NoSuchElementException):
+ assert not browser.find_element_by_id("inner-iframe")
+ browser.switch_to.default_content()
+
+ # Clicking on link with target=_top
+ browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
+ with WaitForPageLoad(browser):
+ browser.find_element_by_id("link_to_top").click()
+ assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content
+ assert "Forbidden" not in browser.page_source
+ browser.switch_to.default_content()
+
+ # Try to escape from inner_frame
+ browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
+ assert "wrapper_nonce" in getContextUrl(browser) # Make sure we are inside of the inner-iframe
+ with WaitForPageLoad(browser):
+ browser.execute_script("window.top.location = window.location")
+ assert "wrapper_nonce" in getContextUrl(browser) # We try to use nonce-ed html without iframe
+ assert "
"+escape(e.message+"",true)+"
"}throw e}}marked.options=marked.setOptions=function(opt){merge(marked.defaults,opt);return marked};marked.defaults={gfm:true,tables:true,breaks:false,pedantic:false,sanitize:false,smartLists:false,silent:false,highlight:null,langPrefix:"lang-",smartypants:false,headerPrefix:"",renderer:new Renderer,xhtml:false};marked.Parser=Parser;marked.parser=Parser.parse;marked.Renderer=Renderer;marked.Lexer=Lexer;marked.lexer=Lexer.lex;marked.InlineLexer=InlineLexer;marked.inlineLexer=InlineLexer.output;marked.parse=marked;if(typeof module!=="undefined"&&typeof exports==="object"){module.exports=marked}else if(typeof define==="function"&&define.amd){define(function(){return marked})}else{this.marked=marked}}).call(function(){return this||(typeof window!=="undefined"?window:global)}());
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/lib/pnglib.js ---- */
+
+
+/**
+* A handy class to calculate color values.
+*
+* @version 1.0
+* @author Robert Eisele
+* @copyright Copyright (c) 2010, Robert Eisele
+* @link http://www.xarg.org/2010/03/generate-client-side-png-files-using-javascript/
+* @license http://www.opensource.org/licenses/bsd-license.php BSD License
+*
+*/
+
// Indexed-color PNG writer: builds the IHDR/PLTE/tRNS/IDAT/IEND chunks in a
// character buffer, storing pixel data as *uncompressed* deflate blocks.
(function() {

    // helper functions for that ctx
    // Copy each string argument (3rd onward) into buffer starting at offs
    function write(buffer, offs) {
        for (var i = 2; i < arguments.length; i++) {
            for (var j = 0; j < arguments[i].length; j++) {
                buffer[offs++] = arguments[i].charAt(j);
            }
        }
    }

    // 16-bit big-endian
    function byte2(w) {
        return String.fromCharCode((w >> 8) & 255, w & 255);
    }

    // 32-bit big-endian
    function byte4(w) {
        return String.fromCharCode((w >> 24) & 255, (w >> 16) & 255, (w >> 8) & 255, w & 255);
    }

    // 16-bit little-endian (deflate stored-block lengths are LSB-first)
    function byte2lsb(w) {
        return String.fromCharCode(w & 255, (w >> 8) & 255);
    }

    window.PNGlib = function(width,height,depth) {

        this.width = width;
        this.height = height;
        this.depth = depth;  // palette size (max number of distinct colors)

        // pixel data and row filter identifier size
        this.pix_size = height * (width + 1);

        // deflate header, pix_size, block headers, adler32 checksum
        this.data_size = 2 + this.pix_size + 5 * Math.floor((0xfffe + this.pix_size) / 0xffff) + 4;

        // offsets and sizes of Png chunks
        this.ihdr_offs = 0;                                 // IHDR offset and size
        this.ihdr_size = 4 + 4 + 13 + 4;
        this.plte_offs = this.ihdr_offs + this.ihdr_size;   // PLTE offset and size
        this.plte_size = 4 + 4 + 3 * depth + 4;
        this.trns_offs = this.plte_offs + this.plte_size;   // tRNS offset and size
        this.trns_size = 4 + 4 + depth + 4;
        this.idat_offs = this.trns_offs + this.trns_size;   // IDAT offset and size
        this.idat_size = 4 + 4 + this.data_size + 4;
        this.iend_offs = this.idat_offs + this.idat_size;   // IEND offset and size
        this.iend_size = 4 + 4 + 4;
        this.buffer_size = this.iend_offs + this.iend_size; // total PNG size

        this.buffer = new Array();
        this.palette = new Object();  // color value -> palette index (as 1-char string)
        this.pindex = 0;

        var _crc32 = new Array();

        // initialize buffer with zero bytes
        for (var i = 0; i < this.buffer_size; i++) {
            this.buffer[i] = "\x00";
        }

        // initialize non-zero elements
        // "\x08\x03" = bit depth 8, color type 3 (indexed palette)
        write(this.buffer, this.ihdr_offs, byte4(this.ihdr_size - 12), 'IHDR', byte4(width), byte4(height), "\x08\x03");
        write(this.buffer, this.plte_offs, byte4(this.plte_size - 12), 'PLTE');
        write(this.buffer, this.trns_offs, byte4(this.trns_size - 12), 'tRNS');
        write(this.buffer, this.idat_offs, byte4(this.idat_size - 12), 'IDAT');
        write(this.buffer, this.iend_offs, byte4(this.iend_size - 12), 'IEND');

        // initialize deflate header
        var header = ((8 + (7 << 4)) << 8) | (3 << 6);
        header+= 31 - (header % 31);  // make the zlib header divisible by 31

        write(this.buffer, this.idat_offs + 8, byte2(header));

        // initialize deflate block headers (stored blocks of at most 0xffff bytes)
        for (var i = 0; (i << 16) - 1 < this.pix_size; i++) {
            var size, bits;
            if (i + 0xffff < this.pix_size) {
                size = 0xffff;
                bits = "\x00";  // not the final block
            } else {
                size = this.pix_size - (i << 16) - i;
                bits = "\x01";  // final block flag
            }
            write(this.buffer, this.idat_offs + 8 + 2 + (i << 16) + (i << 2), bits, byte2lsb(size), byte2lsb(~size));
        }

        /* Create crc32 lookup table */
        for (var i = 0; i < 256; i++) {
            var c = i;
            for (var j = 0; j < 8; j++) {
                if (c & 1) {
                    c = -306674912 ^ ((c >> 1) & 0x7fffffff);
                } else {
                    c = (c >> 1) & 0x7fffffff;
                }
            }
            _crc32[i] = c;
        }

        // compute the index into a png for a given pixel
        // (skips the per-row filter byte and the 5-byte deflate block headers)
        this.index = function(x,y) {
            var i = y * (this.width + 1) + x + 1;
            var j = this.idat_offs + 8 + 2 + 5 * Math.floor((i / 0xffff) + 1) + i;
            return j;
        }

        // convert a color and build up the palette
        this.color = function(red, green, blue, alpha) {

            alpha = alpha >= 0 ? alpha : 255;
            var color = (((((alpha << 8) | red) << 8) | green) << 8) | blue;

            if (typeof this.palette[color] == "undefined") {
                // palette full: fall back to index 0
                if (this.pindex == this.depth) return "\x00";

                var ndx = this.plte_offs + 8 + 3 * this.pindex;

                this.buffer[ndx + 0] = String.fromCharCode(red);
                this.buffer[ndx + 1] = String.fromCharCode(green);
                this.buffer[ndx + 2] = String.fromCharCode(blue);
                this.buffer[this.trns_offs+8+this.pindex] = String.fromCharCode(alpha);

                this.palette[color] = String.fromCharCode(this.pindex++);
            }
            return this.palette[color];
        }

        // output a PNG string, Base64 encoded
        this.getBase64 = function() {

            var s = this.getDump();

            var ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
            var c1, c2, c3, e1, e2, e3, e4;
            var l = s.length;
            var i = 0;
            var r = "";

            do {
                c1 = s.charCodeAt(i);
                e1 = c1 >> 2;
                c2 = s.charCodeAt(i+1);
                e2 = ((c1 & 3) << 4) | (c2 >> 4);
                c3 = s.charCodeAt(i+2);
                if (l < i+2) { e3 = 64; } else { e3 = ((c2 & 0xf) << 2) | (c3 >> 6); }
                if (l < i+3) { e4 = 64; } else { e4 = c3 & 0x3f; }
                r+= ch.charAt(e1) + ch.charAt(e2) + ch.charAt(e3) + ch.charAt(e4);
            } while ((i+= 3) < l);
            return r;
        }

        // output a PNG string
        this.getDump = function() {

            // compute adler32 of output pixels + row filter bytes
            var BASE = 65521; /* largest prime smaller than 65536 */
            var NMAX = 5552;  /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
            var s1 = 1;
            var s2 = 0;
            var n = NMAX;

            for (var y = 0; y < this.height; y++) {
                for (var x = -1; x < this.width; x++) {  // x == -1 covers the row filter byte
                    s1+= this.buffer[this.index(x, y)].charCodeAt(0);
                    s2+= s1;
                    if ((n-= 1) == 0) {
                        s1%= BASE;
                        s2%= BASE;
                        n = NMAX;
                    }
                }
            }
            s1%= BASE;
            s2%= BASE;
            write(this.buffer, this.idat_offs + this.idat_size - 8, byte4((s2 << 16) | s1));

            // compute crc32 of the PNG chunks
            // (covers chunk type + data, excludes the length and CRC fields)
            function crc32(png, offs, size) {
                var crc = -1;
                for (var i = 4; i < size-4; i += 1) {
                    crc = _crc32[(crc ^ png[offs+i].charCodeAt(0)) & 0xff] ^ ((crc >> 8) & 0x00ffffff);
                }
                write(png, offs+size-4, byte4(crc ^ -1));
            }

            crc32(this.buffer, this.ihdr_offs, this.ihdr_size);
            crc32(this.buffer, this.plte_offs, this.plte_size);
            crc32(this.buffer, this.trns_offs, this.trns_size);
            crc32(this.buffer, this.idat_offs, this.idat_size);
            crc32(this.buffer, this.iend_offs, this.iend_size);

            // convert PNG to string, prefixed with the PNG signature
            return "\211PNG\r\n\032\n"+this.buffer.join('');
        }
    }

})();
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/Class.coffee ---- */
+
+
// Minimal logging base class (compiled from Class.coffee). Subclasses
// inherit console-logging helpers gated by `trace`, plus named timers via
// logStart/logEnd.
(function() {
  var Class,
    __slice = [].slice;

  Class = (function() {
    function Class() {}

    // When false, the log helpers become no-ops.
    Class.prototype.trace = true;

    // Log all arguments prefixed with "[ClassName]"; returns `this`.
    Class.prototype.log = function() {
      var parts = __slice.call(arguments);
      if (!this.trace) {
        return;
      }
      if (typeof console === 'undefined') {
        return;
      }
      parts.unshift("[" + this.constructor.name + "]");
      console.log.apply(console, parts);
      return this;
    };

    // Start (or restart) a named timer; logs "(started)" when extra
    // arguments are supplied. Returns `this`.
    Class.prototype.logStart = function() {
      var name = arguments[0];
      var rest = __slice.call(arguments, 1);
      if (!this.trace) {
        return;
      }
      if (!this.logtimers) {
        this.logtimers = {};
      }
      this.logtimers[name] = +(new Date);
      if (rest.length > 0) {
        this.log.apply(this, ["" + name].concat(__slice.call(rest), ["(started)"]));
      }
      return this;
    };

    // Log the milliseconds elapsed since logStart(name). Returns `this`.
    Class.prototype.logEnd = function() {
      var name = arguments[0];
      var rest = __slice.call(arguments, 1);
      var elapsed = +(new Date) - this.logtimers[name];
      this.log.apply(this, ["" + name].concat(__slice.call(rest), ["(Done in " + elapsed + "ms)"]));
      return this;
    };

    return Class;

  })();

  window.Class = Class;

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/InlineEditor.coffee ---- */
+
+
/* In-place editing of [data-editable] page elements (compiled from
   InlineEditor.coffee). Shows a floating edit button on hover, replaces the
   element with a style-matched textarea while editing, and delegates
   persistence to the injected callbacks. */
(function() {
  var InlineEditor,
    __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };

  InlineEditor = (function() {
    // elem: jQuery element to make editable
    // getContent(elem, mode): returns the element's current content
    // saveContent(elem, content, cb): persists the edited content
    // getObject(elem): resolves the owning [data-object] container
    function InlineEditor(_at_elem, _at_getContent, _at_saveContent, _at_getObject) {
      this.elem = _at_elem;
      this.getContent = _at_getContent;
      this.saveContent = _at_saveContent;
      this.getObject = _at_getObject;
      this.cancelEdit = __bind(this.cancelEdit, this);
      this.deleteObject = __bind(this.deleteObject, this);
      this.saveEdit = __bind(this.saveEdit, this);
      this.stopEdit = __bind(this.stopEdit, this);
      this.startEdit = __bind(this.startEdit, this);
      // NOTE(review): $("") creates an empty jQuery set, so no edit button
      // is actually inserted — the button markup appears to have been
      // stripped from this bundle; confirm against the original source.
      this.edit_button = $("");
      this.edit_button.on("click", this.startEdit);
      this.elem.addClass("editable").before(this.edit_button);
      this.editor = null;
      // Keep the floating edit button near the cursor when the element is
      // scrolled past the top of the viewport.
      this.elem.on("mouseenter", (function(_this) {
        return function(e) {
          var scrolltop, top;
          _this.edit_button.css("opacity", "0.4");
          scrolltop = $(window).scrollTop();
          top = _this.edit_button.offset().top - parseInt(_this.edit_button.css("margin-top"));
          if (scrolltop > top) {
            return _this.edit_button.css("margin-top", scrolltop - top + e.clientY - 20);
          } else {
            return _this.edit_button.css("margin-top", "");
          }
        };
      })(this));
      this.elem.on("mouseleave", (function(_this) {
        return function() {
          return _this.edit_button.css("opacity", "");
        };
      })(this));
      if (this.elem.is(":hover")) {
        this.elem.trigger("mouseenter");
      }
    }

    // Swap the element for a textarea pre-filled with its raw content and
    // show the edit toolbar. Returns false to cancel the click.
    InlineEditor.prototype.startEdit = function() {
      var _i, _results;
      this.content_before = this.elem.html();
      // NOTE(review): $("") is an empty set here too — the textarea markup
      // looks stripped from this bundle; the calls below are no-ops until
      // it is restored. Confirm against the original source.
      this.editor = $("");
      this.editor.css("outline", "10000px solid rgba(255,255,255,0)").cssLater("transition", "outline 0.3s", 5).cssLater("outline", "10000px solid rgba(255,255,255,0.9)", 10);
      this.editor.val(this.getContent(this.elem, "raw"));
      this.elem.after(this.editor);
      // Temporarily fill the element with wide placeholder content so
      // copyStyle() measures the widest layout, then restore it.
      this.elem.html((function() {
        _results = [];
        for (_i = 1; _i <= 50; _i++){ _results.push(_i); }
        return _results;
      }).apply(this).join("fill the width"));
      this.copyStyle(this.elem, this.editor);
      this.elem.html(this.content_before);
      this.autoExpand(this.editor);
      this.elem.css("display", "none");
      if ($(window).scrollTop() === 0) {
        this.editor[0].selectionEnd = 0;
        this.editor.focus();
      }
      $(".editable-edit").css("display", "none");
      $(".editbar").css("display", "inline-block").addClassLater("visible", 10);
      $(".publishbar").css("opacity", 0);
      $(".editbar .object").text(this.getObject(this.elem).data("object") + "." + this.elem.data("editable"));
      $(".editbar .button").removeClass("loading");
      $(".editbar .save").off("click").on("click", this.saveEdit);
      $(".editbar .delete").off("click").on("click", this.deleteObject);
      $(".editbar .cancel").off("click").on("click", this.cancelEdit);
      if (this.getObject(this.elem).data("deletable")) {
        $(".editbar .delete").css("display", "").html("Delete " + this.getObject(this.elem).data("object").split(":")[0]);
      } else {
        $(".editbar .delete").css("display", "none");
      }
      // Warn before navigating away with unsaved changes.
      window.onbeforeunload = function() {
        return 'Your unsaved blog changes will be lost!';
      };
      return false;
    };

    // Tear down the editor UI and restore the normal page state.
    InlineEditor.prototype.stopEdit = function() {
      this.editor.remove();
      this.editor = null;
      this.elem.css("display", "");
      $(".editable-edit").css("display", "");
      $(".editbar").cssLater("display", "none", 1000).removeClass("visible");
      $(".publishbar").css("opacity", 1);
      return window.onbeforeunload = null;
    };

    // Persist the edited content; on success replace the element's HTML and
    // re-run syntax highlighting.
    InlineEditor.prototype.saveEdit = function() {
      var content;
      content = this.editor.val();
      $(".editbar .save").addClass("loading");
      this.saveContent(this.elem, content, (function(_this) {
        return function(content_html) {
          if (content_html) {
            $(".editbar .save").removeClass("loading");
            _this.stopEdit();
            if (typeof content_html === "string") {
              _this.elem.html(content_html);
            }
            return $('pre code').each(function(i, block) {
              return hljs.highlightBlock(block);
            });
          } else {
            return $(".editbar .save").removeClass("loading");
          }
        };
      })(this));
      return false;
    };

    // Confirm, then delete the owning object (saveContent with null body).
    InlineEditor.prototype.deleteObject = function() {
      var object_type;
      object_type = this.getObject(this.elem).data("object").split(":")[0];
      // Fixed user-facing message: was "Are you sure you sure to delete".
      Page.cmd("wrapperConfirm", ["Are you sure you want to delete this " + object_type + "?", "Delete"], (function(_this) {
        return function(confirmed) {
          $(".editbar .delete").addClass("loading");
          return Page.saveContent(_this.getObject(_this.elem), null, function() {
            return _this.stopEdit();
          });
        };
      })(this));
      return false;
    };

    // Abandon the edit and restore the original content.
    InlineEditor.prototype.cancelEdit = function() {
      this.stopEdit();
      this.elem.html(this.content_before);
      $('pre code').each(function(i, block) {
        return hljs.highlightBlock(block);
      });
      return false;
    };

    // Copy the computed text/spacing styles of elem_from onto elem_to so
    // the textarea visually matches the element it replaces.
    InlineEditor.prototype.copyStyle = function(elem_from, elem_to) {
      var from_style;
      elem_to.addClass(elem_from[0].className);
      from_style = getComputedStyle(elem_from[0]);
      elem_to.css({
        fontFamily: from_style.fontFamily,
        fontSize: from_style.fontSize,
        fontWeight: from_style.fontWeight,
        marginTop: from_style.marginTop,
        marginRight: from_style.marginRight,
        marginBottom: from_style.marginBottom,
        marginLeft: from_style.marginLeft,
        paddingTop: from_style.paddingTop,
        paddingRight: from_style.paddingRight,
        paddingBottom: from_style.paddingBottom,
        paddingLeft: from_style.paddingLeft,
        lineHeight: from_style.lineHeight,
        textAlign: from_style.textAlign,
        color: from_style.color,
        letterSpacing: from_style.letterSpacing
      });
      if (elem_from.innerWidth() < 1000) {
        return elem_to.css("minWidth", elem_from.innerWidth());
      }
    };

    // Grow the textarea with its content; insert a literal tab on Tab key.
    InlineEditor.prototype.autoExpand = function(elem) {
      var editor;
      editor = elem[0];
      elem.height(1);
      elem.on("input", function() {
        if (editor.scrollHeight > elem.height()) {
          return elem.height(1).height(editor.scrollHeight + parseFloat(elem.css("borderTopWidth")) + parseFloat(elem.css("borderBottomWidth")));
        }
      });
      elem.trigger("input");
      return elem.on('keydown', function(e) {
        var s, val;
        if (e.which === 9) {
          e.preventDefault();
          s = this.selectionStart;
          val = elem.val();
          elem.val(val.substring(0, this.selectionStart) + "\t" + val.substring(this.selectionEnd));
          return this.selectionEnd = s + 1;
        }
      });
    };

    return InlineEditor;

  })();

  window.InlineEditor = InlineEditor;

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/RateLimit.coffee ---- */
+
+
// Simple per-function rate limiter (compiled from RateLimit.coffee).
// The first call runs immediately; calls arriving within `interval` ms are
// coalesced into at most one trailing invocation when the timer fires.
// Note: plain objects coerce `fn` to its source string when used as a key,
// so two distinct functions with identical source share one slot.
(function() {
  var pending_timers, wants_trailing_call;

  pending_timers = {};

  wants_trailing_call = {};

  window.RateLimit = function(interval, fn) {
    if (pending_timers[fn]) {
      // Already inside the quiet period: remember to fire once at the end.
      return wants_trailing_call[fn] = true;
    }
    wants_trailing_call[fn] = false;
    fn();
    return pending_timers[fn] = setTimeout((function() {
      if (wants_trailing_call[fn]) {
        fn();
      }
      delete pending_timers[fn];
      return delete wants_trailing_call[fn];
    }), interval);
  };

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/Text.coffee ---- */
+
+
/* Text helpers (compiled from Text.coffee): markdown rendering via marked,
   deterministic name coloring and link rewriting for proxy/direct access. */
(function() {
  var Renderer, Text,
    __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
    __hasProp = {}.hasOwnProperty,
    __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };

  // Sanitizing renderer: when options.sanitize is set, images are rendered
  // back to their markdown text form instead of an <img> tag.
  Renderer = (function(_super) {
    __extends(Renderer, _super);

    function Renderer() {
      return Renderer.__super__.constructor.apply(this, arguments);
    }

    Renderer.prototype.image = function(href, title, text) {
      // Fixed: this string literal was broken across two lines in the
      // bundle (a syntax error — a raw newline inside the string); the
      // newline is now properly escaped.
      return "![" + text + "](" + href + ")\n";
    };

    return Renderer;

  })(marked.Renderer);

  Text = (function() {
    function Text() {
      this.toUrl = __bind(this.toUrl, this);
    }

    // Map a string to a stable HSL color (used for user names).
    Text.prototype.toColor = function(text) {
      var hash, i;
      hash = 0;
      // Simplified from the compiled range loop, which also iterated once
      // for "" and produced "hsl(NaN,30%,50%)"; an empty string now hashes
      // to 0. Unreachable hex-color code after the return was removed.
      for (i = 0; i < text.length; i++) {
        hash = text.charCodeAt(i) + ((hash << 5) - hash);
      }
      return "hsl(" + (hash % 360) + ",30%,50%)";
    };

    // Render markdown to HTML; gfm + line breaks are always enabled, and
    // the sanitizing renderer is plugged in when options.sanitize is set.
    Text.prototype.toMarked = function(text, options) {
      if (options == null) {
        options = {};
      }
      options["gfm"] = true;
      options["breaks"] = true;
      if (options.sanitize) {
        options["renderer"] = renderer;
      }
      text = marked(text, options);
      return this.fixHtmlLinks(text);
    };

    // Rewrite absolute 127.0.0.1:43110 hrefs for proxy or direct access.
    Text.prototype.fixHtmlLinks = function(text) {
      if (window.is_proxy) {
        return text.replace(/href="http:\/\/(127.0.0.1|localhost):43110/g, 'href="http://zero');
      } else {
        return text.replace(/href="http:\/\/(127.0.0.1|localhost):43110/g, 'href="');
      }
    };

    // Same rewrite for a single bare link.
    Text.prototype.fixLink = function(link) {
      if (window.is_proxy) {
        return link.replace(/http:\/\/(127.0.0.1|localhost):43110/, 'http://zero');
      } else {
        return link.replace(/http:\/\/(127.0.0.1|localhost):43110/, '');
      }
    };

    // Slugify: non-alphanumerics to "+", runs collapsed, trailing trimmed.
    Text.prototype.toUrl = function(text) {
      return text.replace(/[^A-Za-z0-9]/g, "+").replace(/[+]+/g, "+").replace(/[+]+$/, "");
    };

    return Text;

  })();

  // True when served through a zeronet proxy root rather than :43110.
  window.is_proxy = window.location.pathname === "/";

  window.renderer = new Renderer();

  window.Text = new Text();

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/Time.coffee ---- */
+
+
/* Human-friendly time formatting helpers (compiled from Time.coffee). */
(function() {
  var Time;

  Time = (function() {
    function Time() {}

    // Relative "x ago" label for a unix timestamp (seconds). Falls back to
    // an absolute date after three days; "1 minutes" is depluralized.
    Time.prototype.since = function(time) {
      var elapsed, label;
      elapsed = (+(new Date) / 1000) - time;
      if (elapsed < 60) {
        label = "Just now";
      } else if (elapsed < 60 * 60) {
        label = (Math.round(elapsed / 60)) + " minutes ago";
      } else if (elapsed < 60 * 60 * 24) {
        label = (Math.round(elapsed / 60 / 60)) + " hours ago";
      } else if (elapsed < 60 * 60 * 24 * 3) {
        label = (Math.round(elapsed / 60 / 60 / 24)) + " days ago";
      } else {
        label = "on " + this.date(time);
      }
      return label.replace(/1 ([a-z]+)s/, "1 $1");
    };

    // Format a unix timestamp as "Mon DD, YYYY" (short) or with the time
    // appended (any other format value).
    Time.prototype.date = function(timestamp, format) {
      var pieces;
      if (format == null) {
        format = "short";
      }
      pieces = (new Date(timestamp * 1000)).toString().split(" ");
      pieces = format === "short" ? pieces.slice(1, 4) : pieces.slice(1, 5);
      return pieces.join(" ").replace(/( [0-9]{4})/, ",$1");
    };

    // Unix timestamp (seconds) for a date string; "" or "now" means now.
    Time.prototype.timestamp = function(date) {
      if (date == null) {
        date = "";
      }
      if (date === "" || date === "now") {
        return parseInt(+(new Date) / 1000);
      }
      return parseInt(Date.parse(date) / 1000);
    };

    // Estimated reading time at 1500 characters per minute.
    Time.prototype.readtime = function(text) {
      var count = text.length;
      return count > 1500 ? parseInt(count / 1500) + " min read" : "less than 1 min read";
    };

    return Time;

  })();

  window.Time = new Time;

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/utils/ZeroFrame.coffee ---- */
+
+
/* Base class for iframe <-> ZeroNet wrapper communication (compiled from
   ZeroFrame.coffee). Sends commands via postMessage and routes responses
   back to the registered callbacks. */
(function() {
  var ZeroFrame,
    __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
    __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
    __hasProp = {}.hasOwnProperty;

  ZeroFrame = (function(_super) {
    __extends(ZeroFrame, _super);

    function ZeroFrame(url) {
      this.onCloseWebsocket = __bind(this.onCloseWebsocket, this);
      this.onOpenWebsocket = __bind(this.onOpenWebsocket, this);
      this.route = __bind(this.route, this);
      this.onMessage = __bind(this.onMessage, this);
      this.url = url;
      this.waiting_cb = {};
      this.connect();
      this.next_message_id = 1;
      this.init();
    }

    // Subclass hook; called once from the constructor.
    ZeroFrame.prototype.init = function() {
      return this;
    };

    // Start listening for wrapper messages and announce readiness.
    ZeroFrame.prototype.connect = function() {
      this.target = window.parent;
      window.addEventListener("message", this.onMessage, false);
      return this.cmd("innerReady");
    };

    // Dispatch an incoming wrapper message to the matching handler.
    ZeroFrame.prototype.onMessage = function(e) {
      var cmd, message;
      message = e.data;
      cmd = message.cmd;
      if (cmd === "response") {
        if (this.waiting_cb[message.to] != null) {
          return this.waiting_cb[message.to](message.result);
        } else {
          return this.log("Websocket callback not found:", message);
        }
      } else if (cmd === "wrapperReady") {
        return this.cmd("innerReady");
      } else if (cmd === "ping") {
        return this.response(message.id, "pong");
      } else if (cmd === "wrapperOpenedWebsocket") {
        return this.onOpenWebsocket();
      } else if (cmd === "wrapperClosedWebsocket") {
        return this.onCloseWebsocket();
      } else {
        // Fixed: this previously called this.onRequest(), which is not
        // defined anywhere on this class and would throw a TypeError on any
        // unrecognized command; route() is the defined fallback.
        return this.route(cmd, message);
      }
    };

    // Fallback for unrecognized commands; subclasses may override.
    ZeroFrame.prototype.route = function(cmd, message) {
      return this.log("Unknown command", message);
    };

    // Reply to a wrapper request identified by `to`.
    ZeroFrame.prototype.response = function(to, result) {
      return this.send({
        "cmd": "response",
        "to": to,
        "result": result
      });
    };

    // Send a command to the wrapper; cb (optional) receives the response.
    ZeroFrame.prototype.cmd = function(cmd, params, cb) {
      if (params == null) {
        params = {};
      }
      if (cb == null) {
        cb = null;
      }
      return this.send({
        "cmd": cmd,
        "params": params
      }, cb);
    };

    // Post a message with a fresh id; register cb for its response.
    ZeroFrame.prototype.send = function(message, cb) {
      if (cb == null) {
        cb = null;
      }
      message.id = this.next_message_id;
      this.next_message_id += 1;
      this.target.postMessage(message, "*");
      if (cb) {
        return this.waiting_cb[message.id] = cb;
      }
    };

    // Subclass hooks for wrapper websocket state changes.
    ZeroFrame.prototype.onOpenWebsocket = function() {
      return this.log("Websocket open");
    };

    ZeroFrame.prototype.onCloseWebsocket = function() {
      return this.log("Websocket close");
    };

    return ZeroFrame;

  })(Class);

  window.ZeroFrame = ZeroFrame;

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/Comments.coffee ---- */
+
+
/* Comment section controller (compiled from Comments.coffee): loads and
   renders post comments from the merged site database, writes new comments
   into the user's own data file, and handles reply quoting, certificate
   selection and per-user size limits. */
(function() {
  var Comments,
    __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
    __hasProp = {}.hasOwnProperty;

  Comments = (function(_super) {
    __extends(Comments, _super);

    function Comments() {
      return Comments.__super__.constructor.apply(this, arguments);
    }

    // Wire up the comment UI for one post: submit button, textarea
    // auto-expand and the certificate selector.
    Comments.prototype.pagePost = function(post_id, cb) {
      if (cb == null) {
        cb = false;
      }
      this.post_id = post_id;
      this.rules = {};
      $(".button-submit-comment").on("click", (function(_this) {
        return function() {
          _this.submitComment();
          return false;
        };
      })(this));
      this.loadComments("noanim", cb);
      this.autoExpand($(".comment-textarea"));
      return $(".certselect").on("click", (function(_this) {
        return function() {
          if (Page.server_info.rev < 160) {
            // Fixed typo in user-facing message ("upgade" -> "upgrade").
            Page.cmd("wrapperNotification", ["error", "Comments requires at least ZeroNet 0.3.0. Please upgrade!"]);
          } else {
            Page.cmd("certSelect", [["zeroid.bit"]]);
          }
          return false;
        };
      })(this));
    };

    // Query and render all comments of the current post.
    // type "noanim" skips the slide-down animation on initial render.
    Comments.prototype.loadComments = function(type, cb) {
      var query;
      if (type == null) {
        type = "show";
      }
      if (cb == null) {
        cb = false;
      }
      // post_id is interpolated into the SQL; it is presumably numeric
      // (set via pagePost) — confirm callers never pass raw user input.
      query = "SELECT comment.*, json_content.json_id AS content_json_id, keyvalue.value AS cert_user_id, json.directory, (SELECT COUNT(*) FROM comment_vote WHERE comment_vote.comment_uri = comment.comment_id || '@' || json.directory)+1 AS votes FROM comment LEFT JOIN json USING (json_id) LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json') LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id') WHERE post_id = " + this.post_id + " ORDER BY date_added DESC";
      return Page.cmd("dbQuery", query, (function(_this) {
        return function(comments) {
          var comment, comment_address, elem, user_address, _i, _len;
          $(".comments-num").text(comments.length);
          for (_i = 0, _len = comments.length; _i < _len; _i++) {
            comment = comments[_i];
            user_address = comment.directory.replace("users/", "");
            // Comment id + author address keeps ids unique across users'
            // separate data files.
            comment_address = comment.comment_id + "_" + user_address;
            elem = $("#comment_" + comment_address);
            if (elem.length === 0) {
              elem = $(".comment.template").clone().removeClass("template").attr("id", "comment_" + comment_address).data("post_id", _this.post_id);
              if (type !== "noanim") {
                elem.cssSlideDown();
              }
              $(".reply", elem).on("click", function(e) {
                return _this.buttonReply($(e.target).parents(".comment"));
              });
            }
            _this.applyCommentData(elem, comment);
            elem.appendTo(".comments");
          }
          return setTimeout((function() {
            return Page.addInlineEditors();
          }), 1000);
        };
      })(this));
    };

    // Fill one comment element from a DB row; the signed-in author's own
    // comments are marked editable/deletable.
    Comments.prototype.applyCommentData = function(elem, comment) {
      var cert_domain, user_address, user_name, _ref;
      _ref = comment.cert_user_id.split("@"), user_name = _ref[0], cert_domain = _ref[1];
      user_address = comment.directory.replace("users/", "");
      $(".comment-body", elem).html(Text.toMarked(comment.body, {
        "sanitize": true
      }));
      $(".user_name", elem).text(user_name).css({
        "color": Text.toColor(comment.cert_user_id)
      }).attr("title", user_name + "@" + cert_domain + ": " + user_address);
      $(".added", elem).text(Time.since(comment.date_added)).attr("title", Time.date(comment.date_added, "long"));
      if (user_address === Page.site_info.auth_address) {
        $(elem).attr("data-object", "Comment:" + comment.comment_id).attr("data-deletable", "yes");
        return $(".comment-body", elem).attr("data-editable", "body").data("content", comment.body);
      }
    };

    // Quote the clicked comment into the new-comment textarea.
    Comments.prototype.buttonReply = function(elem) {
      var body_add, elem_quote, post_id, user_name;
      this.log("Reply to", elem);
      user_name = $(".user_name", elem).text();
      post_id = elem.attr("id");
      body_add = "> [" + user_name + "](\#" + post_id + "): ";
      // Drop nested blockquotes so only the direct comment text is quoted.
      elem_quote = $(".comment-body", elem).clone();
      $("blockquote", elem_quote).remove();
      body_add += elem_quote.text().trim("\n").replace(/\n/g, "\n> ");
      body_add += "\n\n";
      $(".comment-new .comment-textarea").val($(".comment-new .comment-textarea").val() + body_add);
      $(".comment-new .comment-textarea").trigger("input").focus();
      return false;
    };

    // Append the new comment to the user's data.json and sign+publish it.
    Comments.prototype.submitComment = function() {
      var body, inner_path;
      if (!Page.site_info.cert_user_id) {
        // Fixed phrasing of user-facing message (was "Please, select ...").
        Page.cmd("wrapperNotification", ["info", "Please select your account."]);
        return false;
      }
      body = $(".comment-new .comment-textarea").val();
      if (!body) {
        $(".comment-new .comment-textarea").focus();
        return false;
      }
      $(".comment-new .button-submit").addClass("loading");
      inner_path = "data/users/" + Page.site_info.auth_address + "/data.json";
      return Page.cmd("fileGet", {
        "inner_path": inner_path,
        "required": false
      }, (function(_this) {
        return function(data) {
          var json_raw;
          if (data) {
            data = JSON.parse(data);
          } else {
            // First comment by this user: start a fresh data file.
            data = {
              "next_comment_id": 1,
              "comment": [],
              "comment_vote": {}
            };
          }
          data.comment.push({
            "comment_id": data.next_comment_id,
            "body": body,
            "post_id": _this.post_id,
            "date_added": Time.timestamp()
          });
          data.next_comment_id += 1;
          // UTF-8 encode the JSON, then base64 it for the wrapper call.
          json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
          return Page.writePublish(inner_path, btoa(json_raw), function(res) {
            $(".comment-new .button-submit").removeClass("loading");
            _this.loadComments();
            _this.checkCert("updaterules");
            _this.log("Writepublish result", res);
            if (res !== false) {
              return $(".comment-new .comment-textarea").val("");
            }
          });
        };
      })(this));
    };

    // Reflect the selected certificate in the UI and refresh the size rules
    // when the identity changed (or when forced with type "updaterules").
    Comments.prototype.checkCert = function(type) {
      var last_cert_user_id;
      last_cert_user_id = $(".comment-new .user_name").text();
      if (Page.site_info.cert_user_id) {
        $(".comment-new").removeClass("comment-nocert");
        $(".comment-new .user_name").text(Page.site_info.cert_user_id);
      } else {
        $(".comment-new").addClass("comment-nocert");
        $(".comment-new .user_name").text("Please sign in");
      }
      if ($(".comment-new .user_name").text() !== last_cert_user_id || type === "updaterules") {
        if (Page.site_info.cert_user_id) {
          return Page.cmd("fileRules", "data/users/" + Page.site_info.auth_address + "/content.json", (function(_this) {
            return function(rules) {
              _this.rules = rules;
              if (rules.max_size) {
                return _this.setCurrentSize(rules.current_size);
              } else {
                return _this.setCurrentSize(0);
              }
            };
          })(this));
        } else {
          return this.setCurrentSize(0);
        }
      }
    };

    // Update the used/allowed size indicator under the comment box.
    Comments.prototype.setCurrentSize = function(current_size) {
      var current_size_kb;
      if (current_size) {
        current_size_kb = current_size / 1000;
        $(".user-size").text("used: " + (current_size_kb.toFixed(1)) + "k/" + (Math.round(this.rules.max_size / 1000)) + "k");
        return $(".user-size-used").css("width", Math.round(70 * current_size / this.rules.max_size));
      } else {
        return $(".user-size").text("");
      }
    };

    // Grow the textarea with its content and keep the size indicator in
    // sync while typing.
    Comments.prototype.autoExpand = function(elem) {
      var editor;
      editor = elem[0];
      if (elem.height() > 0) {
        elem.height(1);
      }
      elem.on("input", (function(_this) {
        return function() {
          var current_size, min_height, new_height, old_height;
          if (editor.scrollHeight > elem.height()) {
            old_height = elem.height();
            elem.height(1);
            new_height = editor.scrollHeight;
            new_height += parseFloat(elem.css("borderTopWidth"));
            new_height += parseFloat(elem.css("borderBottomWidth"));
            new_height -= parseFloat(elem.css("paddingTop"));
            new_height -= parseFloat(elem.css("paddingBottom"));
            min_height = parseFloat(elem.css("lineHeight")) * 2;
            if (new_height < min_height) {
              new_height = min_height + 4;
            }
            elem.height(new_height - 4);
          }
          if (_this.rules.max_size) {
            // +90 presumably accounts for the JSON overhead of one comment
            // entry — confirm against the data file format.
            if (elem.val().length > 0) {
              current_size = _this.rules.current_size + elem.val().length + 90;
            } else {
              current_size = _this.rules.current_size;
            }
            return _this.setCurrentSize(current_size);
          }
        };
      })(this));
      if (elem.height() > 0) {
        return elem.trigger("input");
      } else {
        return elem.height("48px");
      }
    };

    return Comments;

  })(Class);

  window.Comments = new Comments();

}).call(this);
+
+
+
+/* ---- data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8/js/ZeroBlog.coffee ---- */
+
+
+(function() {
+ var ZeroBlog,
+ __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
+ __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
+ __hasProp = {}.hasOwnProperty;
+
+ ZeroBlog = (function(_super) {
+ __extends(ZeroBlog, _super);
+
  // Bind instance methods used as callbacks, then run ZeroFrame's
  // constructor (which connects to the wrapper and calls init()).
  function ZeroBlog() {
    this.setSiteinfo = __bind(this.setSiteinfo, this);
    this.actionSetSiteInfo = __bind(this.actionSetSiteInfo, this);
    this.saveContent = __bind(this.saveContent, this);
    this.getContent = __bind(this.getContent, this);
    this.getObject = __bind(this.getObject, this);
    this.onOpenWebsocket = __bind(this.onOpenWebsocket, this);
    this.publish = __bind(this.publish, this);
    this.pageLoaded = __bind(this.pageLoaded, this);
    return ZeroBlog.__super__.constructor.apply(this, arguments);
  }
+
  // Set up page state and deferreds; once both the page and site_info have
  // loaded, enable the owner editing UI and render the site's identicon.
  ZeroBlog.prototype.init = function() {
    this.data = null;
    this.site_info = null;
    this.server_info = null;
    this.event_page_load = $.Deferred();
    this.event_site_info = $.Deferred();
    $.when(this.event_page_load, this.event_site_info).done((function(_this) {
      return function() {
        // Editing UI only for the site owner (or in demo mode).
        if (_this.site_info.settings.own || _this.data.demo) {
          _this.addInlineEditors();
          _this.checkPublishbar();
          $(".publishbar").on("click", _this.publish);
          $(".posts .button.new").css("display", "inline-block");
          return $(".editbar .icon-help").on("click", function() {
            $(".editbar .markdown-help").css("display", "block");
            $(".editbar .markdown-help").toggleClassLater("visible", 10);
            $(".editbar .icon-help").toggleClass("active");
            return false;
          });
        }
      };
    })(this));
    $.when(this.event_site_info).done((function(_this) {
      return function() {
        var imagedata;
        _this.log("event site info");
        imagedata = new Identicon(_this.site_info.address, 70).toString();
        // NOTE(review): append("") is a no-op and imagedata is never used —
        // the identicon markup appears stripped from this bundle; confirm
        // against the original source.
        return $("body").append("");
      };
    })(this));
    return this.log("inited!");
  };
+
+ ZeroBlog.prototype.loadData = function(query) {
+ if (query == null) {
+ query = "new";
+ }
+ if (query === "old") {
+ query = "SELECT key, value FROM json LEFT JOIN keyvalue USING (json_id) WHERE path = 'data.json'";
+ } else {
+ query = "SELECT key, value FROM json LEFT JOIN keyvalue USING (json_id) WHERE directory = '' AND file_name = 'data.json'";
+ }
+ return this.cmd("dbQuery", [query], (function(_this) {
+ return function(res) {
+ var row, _i, _len;
+ _this.data = {};
+ if (res) {
+ for (_i = 0, _len = res.length; _i < _len; _i++) {
+ row = res[_i];
+ _this.data[row.key] = row.value;
+ }
+ $(".left h1 a:not(.editable-edit)").html(_this.data.title).data("content", _this.data.title);
+ $(".left h2").html(Text.toMarked(_this.data.description)).data("content", _this.data.description);
+ return $(".left .links").html(Text.toMarked(_this.data.links)).data("content", _this.data.links);
+ }
+ };
+ })(this));
+ };
+
+ ZeroBlog.prototype.routeUrl = function(url) {
+ var match;
+ this.log("Routing url:", url);
+ if (match = url.match(/Post:([0-9]+)/)) {
+ $("body").addClass("page-post");
+ this.post_id = parseInt(match[1]);
+ return this.pagePost();
+ } else {
+ $("body").addClass("page-main");
+ return this.pageMain();
+ }
+ };
+
+ ZeroBlog.prototype.pagePost = function() {
+ var s;
+ s = +(new Date);
+ return this.cmd("dbQuery", ["SELECT * FROM post WHERE post_id = " + this.post_id + " LIMIT 1"], (function(_this) {
+ return function(res) {
+ if (res.length) {
+ _this.applyPostdata($(".post-full"), res[0], true);
+ Comments.pagePost(_this.post_id);
+ } else {
+ $(".post-full").html("Not found
");
+ }
+ return _this.pageLoaded();
+ };
+ })(this));
+ };
+
  // Render the main page: list all posts with their comment counts, and
  // wire up the owner-only "new post" button.
  ZeroBlog.prototype.pageMain = function() {
    return this.cmd("dbQuery", ["SELECT post.*, COUNT(comment_id) AS comments FROM post LEFT JOIN comment USING (post_id) GROUP BY post_id ORDER BY date_published"], (function(_this) {
      return function(res) {
        var elem, post, s, _i, _len;
        s = +(new Date);
        for (_i = 0, _len = res.length; _i < _len; _i++) {
          post = res[_i];
          elem = $("#post_" + post.post_id);
          // Clone the hidden template only for posts not already rendered.
          if (elem.length === 0) {
            elem = $(".post.template").clone().removeClass("template").attr("id", "post_" + post.post_id);
            elem.prependTo(".posts");
          }
          _this.applyPostdata(elem, post);
        }
        _this.pageLoaded();
        _this.log("Posts loaded in", (+(new Date)) - s, "ms");
        // "New post" button: prepend a fresh post to data.json and show it
        // with inline editors attached.
        return $(".posts .new").on("click", function() {
          _this.cmd("fileGet", ["data/data.json"], function(res) {
            var data;
            data = JSON.parse(res);
            data.post.unshift({
              post_id: data.next_post_id,
              title: "New blog post",
              date_published: (+(new Date)) / 1000,
              body: "Blog post body"
            });
            data.next_post_id += 1;
            elem = $(".post.template").clone().removeClass("template");
            _this.applyPostdata(elem, data.post[0]);
            elem.hide();
            elem.prependTo(".posts").slideDown();
            _this.addInlineEditors(elem);
            // writeData is defined later in this file; presumably persists
            // data.json — not visible from this chunk.
            return _this.writeData(data);
          });
          return false;
        });
      };
    })(this));
  };
+
+ ZeroBlog.prototype.pageLoaded = function() {
+ $("body").addClass("loaded");
+ $('pre code').each(function(i, block) {
+ return hljs.highlightBlock(block);
+ });
+ this.event_page_load.resolve();
+ return this.cmd("innerLoaded", true);
+ };
+
+ ZeroBlog.prototype.addInlineEditors = function(parent) {
+ var editor, elem, elems, _i, _len;
+ this.logStart("Adding inline editors");
+ elems = $("[data-editable]:visible", parent);
+ for (_i = 0, _len = elems.length; _i < _len; _i++) {
+ elem = elems[_i];
+ elem = $(elem);
+ if (!elem.data("editor") && !elem.hasClass("editor")) {
+ editor = new InlineEditor(elem, this.getContent, this.saveContent, this.getObject);
+ elem.data("editor", editor);
+ }
+ }
+ return this.logEnd("Adding inline editors");
+ };
+
+ ZeroBlog.prototype.checkPublishbar = function() {
+ if (!this.site_modified || this.site_modified > this.site_info.content.modified) {
+ return $(".publishbar").addClass("visible");
+ } else {
+ return $(".publishbar").removeClass("visible");
+ }
+ };
+
+ ZeroBlog.prototype.publish = function() {
+ this.cmd("wrapperPrompt", ["Enter your private key:", "password"], (function(_this) {
+ return function(privatekey) {
+ $(".publishbar .button").addClass("loading");
+ return _this.cmd("sitePublish", [privatekey], function(res) {
+ $(".publishbar .button").removeClass("loading");
+ return _this.log("Publish result:", res);
+ });
+ };
+ })(this));
+ return false;
+ };
+
  // Fill a post element (list item or full view) from a DB row.
  // full=true renders the whole body; otherwise only the teaser before the
  // first "---" separator is shown, with a "more" link.
  ZeroBlog.prototype.applyPostdata = function(elem, post, full) {
    var body, date_published, title_hash;
    if (full == null) {
      full = false;
    }
    // URL-safe slug appended to the ?Post:<id>: link.
    title_hash = post.title.replace(/[#?& ]/g, "+").replace(/[+]+/g, "+");
    elem.data("object", "Post:" + post.post_id);
    $(".title .editable", elem).html(post.title).attr("href", "?Post:" + post.post_id + ":" + title_hash).data("content", post.title);
    date_published = Time.since(post.date_published);
    // A "---" line marks the cut between teaser and full body.
    if (post.body.match(/^---/m)) {
      date_published += " · " + (Time.readtime(post.body));
      $(".more", elem).css("display", "inline-block").attr("href", "?Post:" + post.post_id + ":" + title_hash);
    }
    $(".details .published", elem).html(date_published).data("content", post.date_published);
    if (post.comments > 0) {
      $(".details .comments-num", elem).css("display", "inline").attr("href", "?Post:" + post.post_id + ":" + title_hash + "#Comments");
      $(".details .comments-num .num", elem).text(post.comments + " comments");
    } else {
      $(".details .comments-num", elem).css("display", "none");
    }
    if (full) {
      body = post.body;
    } else {
      // Keep only the part of the body before the first "---" line.
      body = post.body.replace(/^([\s\S]*?)\n---\n[\s\S]*$/, "$1");
    }
    return $(".body", elem).html(Text.toMarked(body)).data("content", post.body);
  };
+
+ ZeroBlog.prototype.onOpenWebsocket = function(e) {
+ this.loadData();
+ this.routeUrl(window.location.search.substring(1));
+ this.cmd("siteInfo", {}, this.setSiteinfo);
+ return this.cmd("serverInfo", {}, (function(_this) {
+ return function(ret) {
+ _this.server_info = ret;
+ if (_this.server_info.rev < 160) {
+ return _this.loadData("old");
+ }
+ };
+ })(this));
+ };
+
  // Return the closest ancestor element carrying a data-object attribute.
  ZeroBlog.prototype.getObject = function(elem) {
    return elem.parents("[data-object]:first");
  };
+
  // Return the display content of an editable element.
  // raw: when true return the stored (un-rendered) content.
  // Timestamp fields are formatted as a full date; "simple" mode returns
  // plain text; everything else is rendered as markdown.
  ZeroBlog.prototype.getContent = function(elem, raw) {
    var content, id, type, _ref;
    if (raw == null) {
      raw = false;
    }
    // data-object is "Type:id"
    _ref = this.getObject(elem).data("object").split(":"), type = _ref[0], id = _ref[1];
    id = parseInt(id);
    content = elem.data("content");
    if (elem.data("editable-mode") === "timestamp") {
      content = Time.date(content, "full");
    }
    if (elem.data("editable-mode") === "simple" || raw) {
      return content;
    } else {
      return Text.toMarked(content);
    }
  };
+
  // Persist the edited content of an element, dispatching by object type:
  // Post/Site go to the site's data.json, Comment goes to the user's file.
  // content === null on a deletable element deletes the object instead.
  ZeroBlog.prototype.saveContent = function(elem, content, cb) {
    var id, type, _ref;
    if (cb == null) {
      cb = false;
    }
    if (elem.data("deletable") && content === null) {
      return this.deleteObject(elem, cb);
    }
    elem.data("content", content);
    _ref = this.getObject(elem).data("object").split(":"), type = _ref[0], id = _ref[1];
    id = parseInt(id);
    if (type === "Post" || type === "Site") {
      return this.saveSite(elem, type, id, content, cb);
    } else if (type === "Comment") {
      return this.saveComment(elem, type, id, content, cb);
    }
  };
+
  // Save an edited Post field or Site field into data/data.json.
  // On success cb receives the content rendered according to the element's
  // editable-mode (plain / relative time / markdown); on failure cb(false).
  ZeroBlog.prototype.saveSite = function(elem, type, id, content, cb) {
    return this.cmd("fileGet", ["data/data.json"], (function(_this) {
      return function(res) {
        var data, post;
        data = JSON.parse(res);
        if (type === "Post") {
          // Find the post row with the matching id (CoffeeScript
          // comprehension compiled to a filtering loop)
          post = ((function() {
            var _i, _len, _ref, _results;
            _ref = data.post;
            _results = [];
            for (_i = 0, _len = _ref.length; _i < _len; _i++) {
              post = _ref[_i];
              if (post.post_id === id) {
                _results.push(post);
              }
            }
            return _results;
          })())[0];
          if (elem.data("editable-mode") === "timestamp") {
            // Store timestamps numerically, not as display text
            content = Time.timestamp(content);
          }
          post[elem.data("editable")] = content;
        } else if (type === "Site") {
          data[elem.data("editable")] = content;
        }
        return _this.writeData(data, function(res) {
          if (cb) {
            if (res === true) {
              if (elem.data("editable-mode") === "simple") {
                return cb(content);
              } else if (elem.data("editable-mode") === "timestamp") {
                return cb(Time.since(content));
              } else {
                return cb(Text.toMarked(content));
              }
            } else {
              return cb(false);
            }
          }
        });
      };
    })(this));
  };
+
  // Save an edited comment into the current user's data.json, then sign
  // and publish the file. cb receives the sanitized rendered markdown on
  // success, false on write error.
  ZeroBlog.prototype.saveComment = function(elem, type, id, content, cb) {
    var inner_path;
    this.log("Saving comment...", id);
    this.getObject(elem).css("height", "auto");
    inner_path = "data/users/" + Page.site_info.auth_address + "/data.json";
    return Page.cmd("fileGet", {
      "inner_path": inner_path,
      "required": false
    }, (function(_this) {
      return function(data) {
        var comment, json_raw;
        data = JSON.parse(data);
        // Find the comment row with the matching id
        comment = ((function() {
          var _i, _len, _ref, _results;
          _ref = data.comment;
          _results = [];
          for (_i = 0, _len = _ref.length; _i < _len; _i++) {
            comment = _ref[_i];
            if (comment.comment_id === id) {
              _results.push(comment);
            }
          }
          return _results;
        })())[0];
        comment[elem.data("editable")] = content;
        _this.log(data);
        // utf8-safe base64 of the pretty-printed json
        json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
        return _this.writePublish(inner_path, btoa(json_raw), function(res) {
          if (res === true) {
            Comments.checkCert("updaterules");
            if (cb) {
              return cb(Text.toMarked(content, {
                "sanitize": true
              }));
            }
          } else {
            _this.cmd("wrapperNotification", ["error", "File write error: " + res]);
            if (cb) {
              return cb(false);
            }
          }
        });
      };
    })(this));
  };
+
+ ZeroBlog.prototype.deleteObject = function(elem, cb) {
+ var id, inner_path, type, _ref;
+ if (cb == null) {
+ cb = False;
+ }
+ _ref = elem.data("object").split(":"), type = _ref[0], id = _ref[1];
+ id = parseInt(id);
+ if (type === "Post") {
+ return this.cmd("fileGet", ["data/data.json"], (function(_this) {
+ return function(res) {
+ var data, post;
+ data = JSON.parse(res);
+ if (type === "Post") {
+ post = ((function() {
+ var _i, _len, _ref1, _results;
+ _ref1 = data.post;
+ _results = [];
+ for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+ post = _ref1[_i];
+ if (post.post_id === id) {
+ _results.push(post);
+ }
+ }
+ return _results;
+ })())[0];
+ if (!post) {
+ return false;
+ }
+ data.post.splice(data.post.indexOf(post), 1);
+ return _this.writeData(data, function(res) {
+ if (cb) {
+ cb();
+ }
+ if (res === true) {
+ return elem.slideUp();
+ }
+ });
+ }
+ };
+ })(this));
+ } else if (type === "Comment") {
+ inner_path = "data/users/" + Page.site_info.auth_address + "/data.json";
+ return this.cmd("fileGet", {
+ "inner_path": inner_path,
+ "required": false
+ }, (function(_this) {
+ return function(data) {
+ var comment, json_raw;
+ data = JSON.parse(data);
+ comment = ((function() {
+ var _i, _len, _ref1, _results;
+ _ref1 = data.comment;
+ _results = [];
+ for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
+ comment = _ref1[_i];
+ if (comment.comment_id === id) {
+ _results.push(comment);
+ }
+ }
+ return _results;
+ })())[0];
+ data.comment.splice(data.comment.indexOf(comment), 1);
+ json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
+ return _this.writePublish(inner_path, btoa(json_raw), function(res) {
+ if (res === true) {
+ elem.slideUp();
+ }
+ if (cb) {
+ return cb();
+ }
+ });
+ };
+ })(this));
+ }
+ };
+
  // Write the blog's data/data.json (base64-encoded) and mirror the blog
  // title into content.json. cb(true/false) reports the data.json write
  // result; a content.json error only raises a notification.
  ZeroBlog.prototype.writeData = function(data, cb) {
    var json_raw;
    if (cb == null) {
      cb = null;
    }
    if (!data) {
      return this.log("Data missing");
    }
    // Track the modification time both in memory and in the file
    this.data["modified"] = data.modified = Time.timestamp();
    json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t')));
    this.cmd("fileWrite", ["data/data.json", btoa(json_raw)], (function(_this) {
      return function(res) {
        if (res === "ok") {
          if (cb) {
            cb(true);
          }
        } else {
          _this.cmd("wrapperNotification", ["error", "File write error: " + res]);
          if (cb) {
            cb(false);
          }
        }
        return _this.checkPublishbar();
      };
    })(this));
    return this.cmd("fileGet", ["content.json"], (function(_this) {
      return function(content) {
        // Keep the site title in content.json in sync with the blog title
        content = content.replace(/"title": ".*?"/, "\"title\": \"" + data.title + "\"");
        return _this.cmd("fileWrite", ["content.json", btoa(content)], function(res) {
          if (res !== "ok") {
            return _this.cmd("wrapperNotification", ["error", "Content.json write error: " + res]);
          }
        });
      };
    })(this));
  };
+
  // Write a (base64-encoded) file, then sign and publish it.
  // cb(true) on success, cb(false) on write error, cb(res) on publish error.
  ZeroBlog.prototype.writePublish = function(inner_path, data, cb) {
    return this.cmd("fileWrite", [inner_path, data], (function(_this) {
      return function(res) {
        if (res !== "ok") {
          _this.cmd("wrapperNotification", ["error", "File write error: " + res]);
          cb(false);
          return false;
        }
        return _this.cmd("sitePublish", {
          "inner_path": inner_path
        }, function(res) {
          if (res === "ok") {
            return cb(true);
          } else {
            return cb(res);
          }
        });
      };
    })(this));
  };
+
  // Router for incoming messages from the wrapper websocket.
  ZeroBlog.prototype.onRequest = function(cmd, message) {
    if (cmd === "setSiteInfo") {
      return this.actionSetSiteInfo(message);
    } else {
      return this.log("Unknown command", message);
    }
  };
+
  // The wrapper pushed fresh site info: store it and refresh the publish bar.
  ZeroBlog.prototype.actionSetSiteInfo = function(message) {
    this.setSiteinfo(message.params);
    return this.checkPublishbar();
  };
+
  // Store new site info and react to file-done events: reload comments
  // when a user data file arrives, reload posts when data/data.json does.
  ZeroBlog.prototype.setSiteinfo = function(site_info) {
    var _ref, _ref1;
    this.site_info = site_info;
    this.event_site_info.resolve(site_info);
    if ($("body").hasClass("page-post")) {
      Comments.checkCert();
    }
    if (((_ref = site_info.event) != null ? _ref[0] : void 0) === "file_done" && site_info.event[1].match(/.*users.*data.json$/)) {
      // A user data file finished downloading: refresh comments / post list
      if ($("body").hasClass("page-post")) {
        Comments.loadComments();
      }
      if ($("body").hasClass("page-main")) {
        // Rate-limited: many user files may arrive in a burst
        return RateLimit(500, (function(_this) {
          return function() {
            return _this.pageMain();
          };
        })(this));
      }
    } else if (((_ref1 = site_info.event) != null ? _ref1[0] : void 0) === "file_done" && site_info.event[1] === "data/data.json") {
      // The blog's own data file changed: reload everything
      this.loadData();
      if ($("body").hasClass("page-main")) {
        this.pageMain();
      }
      if ($("body").hasClass("page-post")) {
        return this.pagePost();
      }
    } else {

    }
  };
+
+ return ZeroBlog;
+
+ })(ZeroFrame);
+
+ window.Page = new ZeroBlog();
+
+}).call(this);
diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py
new file mode 100644
index 00000000..8de33b4b
--- /dev/null
+++ b/src/Tor/TorManager.py
@@ -0,0 +1,309 @@
+import logging
+import re
+import socket
+import binascii
+import sys
+import os
+import time
+import random
+import subprocess
+import atexit
+
+import gevent
+
+from Config import config
+from Crypt import CryptRsa
+from Crypt import CryptEd25519
+from Site import SiteManager
+import socks
+from gevent.lock import RLock
+from Debug import Debug
+from Plugin import PluginManager
+
+
@PluginManager.acceptPlugins
class TorManager(object):
    """Connection manager for a Tor client's control port.

    Handles controller authentication, optionally launching the bundled
    Tor client on Windows, creating ED25519-V3 onion services for sites
    and creating SOCKS5-proxied sockets towards onion addresses.
    """

    def __init__(self, fileserver_ip=None, fileserver_port=None):
        # fileserver_ip is accepted for interface compatibility but not used here
        self.privatekeys = {}  # Onion: Privatekey
        self.site_onions = {}  # Site address: Onion
        self.tor_exe = "tools/tor/tor.exe"  # Self-bundled Tor client (Windows only)
        self.has_meek_bridges = os.path.isfile("tools/tor/PluggableTransports/meek-client.exe")
        self.tor_process = None  # Popen handle when we launched Tor ourselves
        self.log = logging.getLogger("TorManager")
        self.start_onions = None
        self.conn = None  # Control port socket
        self.lock = RLock()  # Serializes control port requests
        self.starting = True
        self.connecting = True
        self.status = None
        self.event_started = gevent.event.AsyncResult()  # Set to True/False once startup finished

        if config.tor == "disable":
            self.enabled = False
            self.start_onions = False
            self.setStatus("Disabled")
        else:
            self.enabled = True
            self.setStatus("Waiting")

        if fileserver_port:
            self.fileserver_port = fileserver_port
        else:
            self.fileserver_port = config.fileserver_port

        # Controller and SOCKS proxy addresses, configured as "ip:port"
        self.ip, self.port = config.tor_controller.rsplit(":", 1)
        self.port = int(self.port)

        self.proxy_ip, self.proxy_port = config.tor_proxy.rsplit(":", 1)
        self.proxy_port = int(self.proxy_port)

    def start(self):
        """Connect to the Tor controller; on Windows fall back to starting
        the bundled Tor client when the configured proxy check fails."""
        self.log.debug("Starting (Tor: %s)" % config.tor)
        self.starting = True
        try:
            if not self.connect():
                raise Exception(self.status)
            self.log.debug("Tor proxy port %s check ok" % config.tor_proxy)
        except Exception as err:
            if sys.platform.startswith("win") and os.path.isfile(self.tor_exe):
                self.log.info("Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err))
                # Change to self-bundled Tor ports
                self.port = 49051
                self.proxy_port = 49050
                if config.tor == "always":
                    # In "always" mode route every outgoing socket through the proxy
                    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port)
                self.enabled = True
                if not self.connect():
                    self.startTor()
            else:
                self.log.info("Disabling Tor, because error while accessing Tor proxy at port %s: %s" % (config.tor_proxy, err))
                self.enabled = False

    def setStatus(self, status):
        """Store a human-readable status string and notify UI websockets."""
        self.status = status
        if "main" in sys.modules:  # import main has side-effects, breaks tests
            import main
            if "ui_server" in dir(main):
                main.ui_server.updateWebsocket()

    def startTor(self):
        """Launch the bundled Tor client (Windows only) and connect to it.

        Registers an atexit hook to shut Tor down; always returns False
        (callers rely on self.enabled / self.conn for the outcome).
        """
        if sys.platform.startswith("win"):
            try:
                self.log.info("Starting Tor client %s..." % self.tor_exe)
                tor_dir = os.path.dirname(self.tor_exe)
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # Hide the console window
                cmd = r"%s -f torrc --defaults-torrc torrc-defaults --ignore-missing-torrc" % self.tor_exe
                if config.tor_use_bridges:
                    cmd += " --UseBridges 1"

                self.tor_process = subprocess.Popen(cmd, cwd=tor_dir, close_fds=True, startupinfo=startupinfo)
                for wait in range(1, 3):  # Wait for startup
                    time.sleep(wait * 0.5)
                    self.enabled = True
                    if self.connect():
                        if self.isSubprocessRunning():
                            self.request("TAKEOWNERSHIP")  # Shut down Tor client when control connection closed
                        break
                # Terminate on exit
                atexit.register(self.stopTor)
            except Exception as err:
                self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err)))
                self.enabled = False
        self.starting = False
        self.event_started.set(False)
        return False

    def isSubprocessRunning(self):
        """True if the bundled Tor client we launched is still alive."""
        return self.tor_process and self.tor_process.pid and self.tor_process.poll() is None

    def stopTor(self):
        """Ask our bundled Tor client to shut down (atexit hook)."""
        self.log.debug("Stopping...")
        try:
            if self.isSubprocessRunning():
                self.request("SIGNAL SHUTDOWN")
        except Exception as err:
            self.log.error("Error stopping Tor: %s" % err)

    def connect(self):
        """Reset onion state and (re)connect to the Tor controller."""
        if not self.enabled:
            return False
        self.site_onions = {}
        self.privatekeys = {}

        return self.connectController()

    def connectController(self):
        """Open and authenticate the control-port connection.

        Supports password auth (config.tor_password), cookie auth and
        no-auth. Returns the connected socket, or None on error.
        """
        if "socket_noproxy" in dir(socket):  # Socket proxy-patched, use non-proxy one
            conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
        else:
            conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        self.log.debug("Connecting to Tor Controller %s:%s" % (self.ip, self.port))
        self.connecting = True
        try:
            with self.lock:
                conn.connect((self.ip, self.port))

                # Auth cookie file
                res_protocol = self.send("PROTOCOLINFO", conn)
                cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol)

                if config.tor_password:
                    res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn)
                elif cookie_match:
                    cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape")
                    if not os.path.isfile(cookie_file) and self.tor_process:
                        # Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57)
                        cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie"
                    auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read())
                    res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn)
                else:
                    res_auth = self.send("AUTHENTICATE", conn)

                if "250 OK" not in res_auth:
                    raise Exception("Authenticate error %s" % res_auth)

                # Version 0.2.7.5 required because ADD_ONION support
                res_version = self.send("GETINFO version", conn)
                version = re.search(r'version=([0-9\.]+)', res_version).group(1)
                # Replacing the first two dots maps e.g. "0.2.7.5" -> "00207.5" -> 207.5
                if float(version.replace(".", "0", 2)) < 207.5:
                    raise Exception("Tor version >=0.2.7.5 required, found: %s" % version)

                self.setStatus("Connected (%s)" % res_auth)
                self.event_started.set(True)
                self.starting = False
                self.connecting = False
                self.conn = conn
        except Exception as err:
            self.conn = None
            self.setStatus("Error (%s)" % str(err))
            self.log.warning("Tor controller connect error: %s" % Debug.formatException(str(err)))
            self.enabled = False
        return self.conn

    def disconnect(self):
        """Close the controller connection."""
        if self.conn:
            self.conn.close()
        self.conn = None

    def startOnions(self):
        """Enable onion creation and create the shared "global" onion."""
        if self.enabled:
            self.log.debug("Start onions")
            self.start_onions = True
            self.getOnion("global")

    # Get new exit node ip
    def resetCircuits(self):
        res = self.request("SIGNAL NEWNYM")
        if "250 OK" not in res:
            self.setStatus("Reset circuits error (%s)" % res)
            self.log.error("Tor reset circuits error: %s" % res)

    def addOnion(self):
        """Create a new hidden service for the file server port.

        Returns the onion address (without ".onion"); an already-running,
        non-global onion when config.tor_hs_limit is reached; or False on
        error.
        """
        if len(self.privatekeys) >= config.tor_hs_limit:
            return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")])

        result = self.makeOnionAndKey()
        if result:
            onion_address, onion_privatekey = result
            self.privatekeys[onion_address] = onion_privatekey
            self.setStatus("OK (%s onions running)" % len(self.privatekeys))
            # Never try to connect to our own onion as a peer
            SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port))
            return onion_address
        else:
            return False

    def makeOnionAndKey(self):
        """ADD_ONION with a fresh ED25519-V3 key.

        Returns (onion_address, onion_privatekey) or False on error.
        """
        res = self.request("ADD_ONION NEW:ED25519-V3 port=%s" % self.fileserver_port)
        match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=ED25519-V3:(.*?)[\r\n]", res, re.DOTALL)
        if match:
            onion_address, onion_privatekey = match.groups()
            return (onion_address, onion_privatekey)
        else:
            self.setStatus("AddOnion error (%s)" % res)
            self.log.error("Tor addOnion error: %s" % res)
            return False

    def delOnion(self, address):
        """DEL_ONION a hidden service and forget its key. True on success."""
        res = self.request("DEL_ONION %s" % address)
        if "250 OK" in res:
            del self.privatekeys[address]
            self.setStatus("OK (%s onion running)" % len(self.privatekeys))
            return True
        else:
            self.setStatus("DelOnion error (%s)" % res)
            self.log.error("Tor delOnion error: %s" % res)
            self.disconnect()
            return False

    def request(self, cmd):
        """Send a controller command, reconnecting first if needed.

        Returns the response text, "" when no connection could be made,
        or False when Tor is disabled.
        """
        with self.lock:
            if not self.enabled:
                return False
            if not self.conn:
                if not self.connect():
                    return ""
            return self.send(cmd)

    def send(self, cmd, conn=None):
        """Send one command and read the response up to "250 OK".

        Retries once after reconnecting. Always returns a string: the full
        response on success, "" on failure. (Fix: this previously returned
        None on failure, which broke callers doing `"250 OK" in res`, and
        left the retry iteration crashing on `None.endswith`.)
        """
        if not conn:
            conn = self.conn
        self.log.debug("> %s" % cmd)
        back = ""
        for retry in range(2):
            try:
                conn.sendall(b"%s\r\n" % cmd.encode("utf8"))
                while not back.endswith("250 OK\r\n"):
                    back += conn.recv(1024 * 64).decode("utf8")
                break
            except Exception as err:
                self.log.error("Tor send error: %s, reconnecting..." % err)
                back = ""  # Fix: reset (was None) so retry and callers get a usable string
                if not self.connecting:
                    self.disconnect()
                    time.sleep(1)
                    self.connect()
                    conn = self.conn  # Fix: retry on the freshly opened socket, not the dead one
        if back:
            self.log.debug("< %s" % back.strip())
        return back

    def getPrivatekey(self, address):
        """Return the stored private key of one of our onion addresses."""
        return self.privatekeys[address]

    def getPublickey(self, address):
        # NOTE(review): onions are created with ED25519-V3 keys, yet this
        # delegates to CryptRsa (CryptEd25519 is imported but unused here)
        # — confirm which key type callers expect.
        return CryptRsa.privatekeyToPublickey(self.privatekeys[address])

    def getOnion(self, site_address):
        """Return (creating on demand) the onion address serving a site.

        In tor "always" mode every site gets its own onion; otherwise all
        sites share the "global" onion. Returns None when Tor is disabled.
        """
        if not self.enabled:
            return None

        if config.tor == "always":  # Different onion for every site
            onion = self.site_onions.get(site_address)
        else:  # Same onion for every site
            onion = self.site_onions.get("global")
            site_address = "global"

        if not onion:
            with self.lock:
                self.site_onions[site_address] = self.addOnion()
                onion = self.site_onions[site_address]
                self.log.debug("Created new hidden service for %s: %s" % (site_address, onion))

        return onion

    # Create a socket suitable for reaching an onion address: in "always"
    # mode a plain socket (everything is proxied globally already),
    # otherwise a SOCKS5 socket aimed at the Tor proxy. The caller
    # performs the actual connect.
    def createSocket(self, onion, port):
        if not self.enabled:
            return False
        self.log.debug("Creating new Tor socket to %s:%s" % (onion, port))
        if self.starting:
            self.log.debug("Waiting for startup...")
            self.event_started.get()
        if config.tor == "always":  # Every socket is proxied by default, in this mode
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        else:
            sock = socks.socksocket()
            sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port)
        return sock
diff --git a/src/Tor/__init__.py b/src/Tor/__init__.py
new file mode 100644
index 00000000..d0fcffaf
--- /dev/null
+++ b/src/Tor/__init__.py
@@ -0,0 +1 @@
+from .TorManager import TorManager
\ No newline at end of file