Add files via upload

wupg98 2023-09-02 08:10:50 +02:00 committed by GitHub
parent 9c2d826362
commit c8881407f6
75 changed files with 7765 additions and 0 deletions

src/Test/BenchmarkSsl.py (new file, 162 lines)

@@ -0,0 +1,162 @@
#!/usr/bin/env python3
from gevent import monkey
monkey.patch_all()

import os
import time
import sys
import socket
import ssl
sys.path.append(os.path.abspath(".."))  # Imports relative to src dir

import io as StringIO
import gevent
from gevent.server import StreamServer
from gevent.pool import Pool

from Config import config
config.parse()

from util import SslPatch


# Server
socks = []
data = os.urandom(1024 * 100)
data += b"\n"


def handle(sock_raw, addr):
    socks.append(sock_raw)
    sock = sock_raw
    # sock = ctx.wrap_socket(sock, server_side=True)
    # if sock_raw.recv(1, gevent.socket.MSG_PEEK) == "\x16":
    #     sock = gevent.ssl.wrap_socket(sock_raw, server_side=True, keyfile='key-cz.pem',
    #                                   certfile='cert-cz.pem', ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
    # fp = os.fdopen(sock.fileno(), 'rb', 1024*512)
    try:
        while True:
            line = sock.recv(16 * 1024)
            if not line:
                break
            if line == b"bye\n":
                break
            elif line == b"gotssl\n":
                sock.sendall(b"yes\n")
                sock = gevent.ssl.wrap_socket(
                    sock_raw, server_side=True, keyfile='../../data/key-rsa.pem', certfile='../../data/cert-rsa.pem',
                    ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1
                )
            else:
                sock.sendall(data)
    except Exception as err:
        print(err)
    try:
        sock.shutdown(gevent.socket.SHUT_WR)
        sock.close()
    except Exception:
        pass
    socks.remove(sock_raw)

pool = Pool(1000)  # Do not accept more than 1000 connections
server = StreamServer(('127.0.0.1', 1234), handle, spawn=pool)
server.start()


# Client
total_num = 0
total_bytes = 0
cipher = None
ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDH+AES128:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:AES128-SHA:HIGH:" + \
          "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK"

# ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)


def getData():
    global total_num, total_bytes, cipher
    data = None
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # sock = socket.ssl(s)
    # sock = ssl.wrap_socket(sock)
    sock.connect(("127.0.0.1", 1234))
    # sock.do_handshake()
    # cipher = sock.cipher()
    sock.send(b"gotssl\n")
    if sock.recv(128) == b"yes\n":
        sock = ssl.wrap_socket(sock, ciphers=ciphers, ssl_version=ssl.PROTOCOL_TLSv1)
        sock.do_handshake()
        cipher = sock.cipher()

    for req in range(20):
        sock.sendall(b"req\n")
        buff = StringIO.BytesIO()
        data = sock.recv(16 * 1024)
        buff.write(data)
        if not data:
            break
        while not data.endswith(b"\n"):
            data = sock.recv(16 * 1024)
            if not data:
                break
            buff.write(data)
        total_num += 1
        total_bytes += buff.tell()
    if not data:
        print("No data")

    sock.shutdown(gevent.socket.SHUT_WR)
    sock.close()

s = time.time()


def info():
    import psutil
    import os
    process = psutil.Process(os.getpid())
    if "memory_info" in dir(process):
        memory_info = process.memory_info
    else:
        memory_info = process.get_memory_info
    while 1:
        print(total_num, "req", (total_bytes / 1024), "kbytes", "transferred in", time.time() - s, end=' ')
        print("using", cipher, "Mem:", memory_info()[0] / float(2 ** 20))
        time.sleep(1)

gevent.spawn(info)

for test in range(1):
    clients = []
    for i in range(500):  # Thread
        clients.append(gevent.spawn(getData))
    gevent.joinall(clients)

print(total_num, "req", (total_bytes / 1024), "kbytes", "transferred in", time.time() - s)

# Separate client/server process:
# 10*10*100:
# Raw:      10000 req 1000009 kbytes transferred in 5.39999985695
# RSA 2048: 10000 req 1000009 kbytes transferred in 27.7890000343 using ('ECDHE-RSA-AES256-SHA', 'TLSv1/SSLv3', 256)
# ECC:      10000 req 1000009 kbytes transferred in 26.1959998608 using ('ECDHE-ECDSA-AES256-SHA', 'TLSv1/SSLv3', 256)
# ECC:      10000 req 1000009 kbytes transferred in 28.2410001755 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 13.3828125
#
# 10*100*10:
# Raw:      10000 req 1000009 kbytes transferred in 7.02700018883 Mem: 14.328125
# RSA 2048: 10000 req 1000009 kbytes transferred in 44.8860001564 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.078125
# ECC:      10000 req 1000009 kbytes transferred in 37.9430000782 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 20.0234375
#
# 1*100*100:
# Raw:      10000 req 1000009 kbytes transferred in 4.64400005341 Mem: 14.06640625
# RSA:      10000 req 1000009 kbytes transferred in 24.2300000191 using ('ECDHE-RSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 19.7734375
# ECC:      10000 req 1000009 kbytes transferred in 22.8849999905 using ('ECDHE-ECDSA-AES256-GCM-SHA384', 'TLSv1/SSLv3', 256) Mem: 17.8125
# AES128:   10000 req 1000009 kbytes transferred in 21.2839999199 using ('AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.1328125
# ECC+128:  10000 req 1000009 kbytes transferred in 20.496999979 using ('ECDHE-ECDSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.40234375
#
#
# Single process:
# 1*100*100
# RSA: 10000 req 1000009 kbytes transferred in 41.7899999619 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 26.91015625
#
# 10*10*100
# RSA: 10000 req 1000009 kbytes transferred in 40.1640000343 using ('ECDHE-RSA-AES128-GCM-SHA256', 'TLSv1/SSLv3', 128) Mem: 14.94921875

src/Test/Spy.py (new file, 23 lines)

@@ -0,0 +1,23 @@
import logging


class Spy:
    def __init__(self, obj, func_name):
        self.obj = obj
        self.__name__ = func_name
        self.func_original = getattr(self.obj, func_name)
        self.calls = []

    def __enter__(self, *args, **kwargs):
        logging.debug("Spy started")

        def loggedFunc(cls, *args, **kwargs):
            call = dict(enumerate(args, 1))
            call[0] = cls
            call.update(kwargs)
            logging.debug("Spy call: %s" % call)
            self.calls.append(call)
            return self.func_original(cls, *args, **kwargs)

        setattr(self.obj, self.__name__, loggedFunc)
        return self.calls

    def __exit__(self, *args, **kwargs):
        setattr(self.obj, self.__name__, self.func_original)
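
For reference, a minimal usage sketch of the Spy context manager (Greeter and greet are illustrative names, not part of this commit): patching a method on a class records every call made while the with-block is active, then restores the original on exit:

class Greeter:
    def greet(self, name, punct="!"):
        return "Hello %s%s" % (name, punct)

obj = Greeter()
with Spy(Greeter, "greet") as calls:
    obj.greet("World", punct="?")

# Each call is recorded as {0: instance, 1..n: positional args, **kwargs}
assert calls == [{0: obj, 1: "World", "punct": "?"}]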

src/Test/TestCached.py (new file, 59 lines)

@@ -0,0 +1,59 @@
import time

from util import Cached


class CachedObject:
    def __init__(self):
        self.num_called_add = 0
        self.num_called_multiply = 0
        self.num_called_none = 0

    @Cached(timeout=1)
    def calcAdd(self, a, b):
        self.num_called_add += 1
        return a + b

    @Cached(timeout=1)
    def calcMultiply(self, a, b):
        self.num_called_multiply += 1
        return a * b

    @Cached(timeout=1)
    def none(self):
        self.num_called_none += 1
        return None


class TestCached:
    def testNoneValue(self):
        cached_object = CachedObject()
        assert cached_object.none() is None
        assert cached_object.none() is None
        assert cached_object.num_called_none == 1
        time.sleep(2)
        assert cached_object.none() is None
        assert cached_object.num_called_none == 2

    def testCall(self):
        cached_object = CachedObject()

        assert cached_object.calcAdd(1, 2) == 3
        assert cached_object.calcAdd(1, 2) == 3
        assert cached_object.calcMultiply(1, 2) == 2
        assert cached_object.calcMultiply(1, 2) == 2
        assert cached_object.num_called_add == 1
        assert cached_object.num_called_multiply == 1

        assert cached_object.calcAdd(2, 3) == 5
        assert cached_object.calcAdd(2, 3) == 5
        assert cached_object.num_called_add == 2

        assert cached_object.calcAdd(1, 2) == 3
        assert cached_object.calcMultiply(2, 3) == 6
        assert cached_object.num_called_add == 2
        assert cached_object.num_called_multiply == 2

        time.sleep(2)
        assert cached_object.calcAdd(1, 2) == 3
        assert cached_object.num_called_add == 3
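
util.Cached itself is not part of this diff; below is a plausible sketch of such a timeout-based memoizing decorator, inferred from the tests above (the real ZeroNet helper may differ in details). Note that None results must be cached too, which is why the cache stores a (value, deadline) pair instead of testing the value itself:

import time

class Cached(object):
    def __init__(self, timeout):
        self.cache_db = {}
        self.timeout = timeout

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            key = "%s %s" % (args, kwargs)  # args[0] is the instance, so the cache is per object
            if key in self.cache_db:
                cached_value, time_cached_end = self.cache_db[key]
                if time.time() < time_cached_end:
                    return cached_value  # Still fresh; None values are returned from cache too
                del self.cache_db[key]  # Expired
            cached_value = func(*args, **kwargs)
            self.cache_db[key] = (cached_value, time.time() + self.timeout)
            return cached_value
        return wrapper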

src/Test/TestConfig.py (new file, 31 lines)

@@ -0,0 +1,31 @@
import pytest

import Config


@pytest.mark.usefixtures("resetSettings")
class TestConfig:
    def testParse(self):
        # Defaults
        config_test = Config.Config("zeronet.py".split(" "))
        config_test.parse(silent=True, parse_config=False)
        assert not config_test.debug
        assert not config_test.debug_socket

        # Test parse command line with unknown parameters (ui_password)
        config_test = Config.Config("zeronet.py --debug --debug_socket --ui_password hello".split(" "))
        config_test.parse(silent=True, parse_config=False)
        assert config_test.debug
        assert config_test.debug_socket
        with pytest.raises(AttributeError):
            config_test.ui_password

        # More complex test
        args = "zeronet.py --unknown_arg --debug --debug_socket --ui_restrict 127.0.0.1 1.2.3.4 "
        args += "--another_unknown argument --use_openssl False siteSign address privatekey --inner_path users/content.json"
        config_test = Config.Config(args.split(" "))
        config_test.parse(silent=True, parse_config=False)
        assert config_test.debug
        assert "1.2.3.4" in config_test.ui_restrict
        assert not config_test.use_openssl
        assert config_test.inner_path == "users/content.json"

src/Test/TestConnection.py (new file, 118 lines)

@@ -0,0 +1,118 @@
import time
import socket
import gevent

import pytest
import mock

from Crypt import CryptConnection
from Connection import ConnectionServer
from Config import config


@pytest.mark.usefixtures("resetSettings")
class TestConnection:
    def testIpv6(self, file_server6):
        assert ":" in file_server6.ip

        client = ConnectionServer(file_server6.ip, 1545)
        connection = client.getConnection(file_server6.ip, 1544)

        assert connection.ping()

        # Close connection
        connection.close()
        client.stop()
        time.sleep(0.01)
        assert len(file_server6.connections) == 0

        # It should not be reachable on the IPv4 address
        with pytest.raises(socket.error) as err:
            client = ConnectionServer("127.0.0.1", 1545)
            connection = client.getConnection("127.0.0.1", 1544)

    def testSslConnection(self, file_server):
        client = ConnectionServer(file_server.ip, 1545)
        assert file_server != client

        # Connect to myself
        with mock.patch('Config.config.ip_local', return_value=[]):  # SSL not used for local ips
            connection = client.getConnection(file_server.ip, 1544)

        assert len(file_server.connections) == 1
        assert connection.handshake
        assert connection.crypt

        # Close connection
        connection.close("Test ended")
        client.stop()
        time.sleep(0.1)
        assert len(file_server.connections) == 0
        assert file_server.num_incoming == 2  # One for the file_server fixture, one for this test

    def testRawConnection(self, file_server):
        client = ConnectionServer(file_server.ip, 1545)
        assert file_server != client

        # Remove all supported crypto
        crypt_supported_bk = CryptConnection.manager.crypt_supported
        CryptConnection.manager.crypt_supported = []

        with mock.patch('Config.config.ip_local', return_value=[]):  # SSL not used for local ips
            connection = client.getConnection(file_server.ip, 1544)
        assert len(file_server.connections) == 1
        assert not connection.crypt

        # Close connection
        connection.close()
        client.stop()
        time.sleep(0.01)
        assert len(file_server.connections) == 0

        # Reset supported crypts
        CryptConnection.manager.crypt_supported = crypt_supported_bk

    def testPing(self, file_server, site):
        client = ConnectionServer(file_server.ip, 1545)
        connection = client.getConnection(file_server.ip, 1544)

        assert connection.ping()

        connection.close()
        client.stop()

    def testGetConnection(self, file_server):
        client = ConnectionServer(file_server.ip, 1545)
        connection = client.getConnection(file_server.ip, 1544)

        # Get connection by ip/port
        connection2 = client.getConnection(file_server.ip, 1544)
        assert connection == connection2

        # Get connection by peer id
        assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False)
        connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False)
        assert connection2 == connection

        connection.close()
        client.stop()

    def testFloodProtection(self, file_server):
        whitelist = file_server.whitelist  # Save for reset
        file_server.whitelist = []  # Disable 127.0.0.1 whitelist
        client = ConnectionServer(file_server.ip, 1545)

        # Only 6 connections are allowed in 1 minute
        for reconnect in range(6):
            connection = client.getConnection(file_server.ip, 1544)
            assert connection.handshake
            connection.close()

        # The 7th one will time out
        with pytest.raises(gevent.Timeout):
            with gevent.Timeout(0.1):
                connection = client.getConnection(file_server.ip, 1544)

        # Reset whitelist
        file_server.whitelist = whitelist

src/Test/TestContent.py (new file, 273 lines)

@@ -0,0 +1,273 @@
import json
import time
import io

import pytest

from Crypt import CryptBitcoin
from Content.ContentManager import VerifyError, SignError
from util.SafeRe import UnsafePatternError


@pytest.mark.usefixtures("resetSettings")
class TestContent:
    privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"

    def testInclude(self, site):
        # Rules defined in the parent content.json
        rules = site.content_manager.getRules("data/test_include/content.json")

        assert rules["signers"] == ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"]  # Valid signer
        assert rules["user_name"] == "test"  # Extra data
        assert rules["max_size"] == 20000  # Max size of files
        assert not rules["includes_allowed"]  # Don't allow more includes
        assert rules["files_allowed"] == "data.json"  # Allowed file pattern

        # Valid signers for "data/test_include/content.json"
        valid_signers = site.content_manager.getValidSigners("data/test_include/content.json")
        assert "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" in valid_signers  # Extra valid signer defined in parent content.json
        assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers  # The site itself
        assert len(valid_signers) == 2  # No more

        # Valid signers for "data/users/content.json"
        valid_signers = site.content_manager.getValidSigners("data/users/content.json")
        assert "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" in valid_signers  # Extra valid signer defined in parent content.json
        assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in valid_signers  # The site itself
        assert len(valid_signers) == 2

        # Valid signers for the root content.json
        assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]

    def testIncludeLimits(self, site, crypt_bitcoin_lib):
        # Data validation
        res = []
        data_dict = {
            "files": {
                "data.json": {
                    "sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
                    "size": 505
                }
            },
            "modified": time.time()
        }

        # Normal data
        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
        data_json = json.dumps(data_dict).encode()
        data = io.BytesIO(data_json)
        assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)

        # Reset
        del data_dict["signs"]

        # Too large
        data_dict["files"]["data.json"]["size"] = 200000  # Emulate a data.json grown past the 20k limit
        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
        assert "Include too large" in str(err.value)

        # Reset
        data_dict["files"]["data.json"]["size"] = 505
        del data_dict["signs"]

        # Not allowed file
        data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"]
        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
        assert "File not allowed" in str(err.value)

        # Reset
        del data_dict["files"]["notallowed.exe"]
        del data_dict["signs"]

        # Should work again
        data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)}
        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)

    @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"])
    def testSign(self, site, inner_path):
        # Bad privatekey
        with pytest.raises(SignError) as err:
            site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False)
        assert "Private key invalid" in str(err.value)

        # Good privatekey
        content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False)
        content_old = site.content_manager.contents[inner_path]  # Content before signing
        assert not content_old == content  # Timestamp changed
        assert site.address in content["signs"]  # Used the site's private key to sign
        if inner_path == "content.json":
            assert len(content["files"]) == 17
        elif inner_path == "data/test_include/content.json":
            assert len(content["files"]) == 1
        elif inner_path == "data/users/content.json":
            assert len(content["files"]) == 0

        # Everything should be the same as before, except the modified timestamp and the signs
        assert (
            {key: val for key, val in content_old.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
            ==
            {key: val for key, val in content.items() if key not in ["modified", "signs", "sign", "zeronet_version"]}
        )

    def testSignOptionalFiles(self, site):
        for hash in list(site.content_manager.hashfield):
            site.content_manager.hashfield.remove(hash)

        assert len(site.content_manager.hashfield) == 0

        site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))"
        content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)

        del site.content_manager.contents["content.json"]["optional"]
        content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True)

        assert len(content_nooptional.get("files_optional", {})) == 0  # No optional files if no pattern
        assert len(content_optional["files_optional"]) > 0
        assert len(site.content_manager.hashfield) == len(content_optional["files_optional"])  # Hashed optional files should be added to hashfield
        assert len(content_nooptional["files"]) > len(content_optional["files"])

    def testFileInfo(self, site):
        assert "sha512" in site.content_manager.getFileInfo("index.html")
        assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json"
        assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json"
        assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json"
        assert not site.content_manager.getFileInfo("notexist")

        # Optional file
        file_info_optional = site.content_manager.getFileInfo("data/optional.txt")
        assert "sha512" in file_info_optional
        assert file_info_optional["optional"] is True

        # User content.json that does not exist yet
        assert "cert_signers" in site.content_manager.getFileInfo("data/users/unknown/content.json")

        # Optional user file
        file_info_optional = site.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert "sha512" in file_info_optional
        assert file_info_optional["optional"] is True

    def testVerify(self, site, crypt_bitcoin_lib):
        inner_path = "data/test_include/content.json"
        data_dict = site.storage.loadJson(inner_path)
        data = io.BytesIO(json.dumps(data_dict).encode("utf8"))

        # Re-sign
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
        }
        assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)

        # Wrong address
        data_dict["address"] = "Othersite"
        del data_dict["signs"]
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(inner_path, data, ignore_same=False)
        assert "Wrong site address" in str(err.value)

        # Wrong inner_path
        data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
        data_dict["inner_path"] = "content.json"
        del data_dict["signs"]
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(inner_path, data, ignore_same=False)
        assert "Wrong inner_path" in str(err.value)

        # Everything right again
        data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
        data_dict["inner_path"] = inner_path
        del data_dict["signs"]
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)

    def testVerifyInnerPath(self, site, crypt_bitcoin_lib):
        inner_path = "content.json"
        data_dict = site.storage.loadJson(inner_path)

        for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]:
            data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}

            if "sign" in data_dict:
                del data_dict["sign"]
            del data_dict["signs"]
            data_dict["signs"] = {
                "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
            }
            data = io.BytesIO(json.dumps(data_dict).encode())
            assert site.content_manager.verifyFile(inner_path, data, ignore_same=False)

        for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]:
            data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}}

            if "sign" in data_dict:
                del data_dict["sign"]
            del data_dict["signs"]
            data_dict["signs"] = {
                "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)
            }
            data = io.BytesIO(json.dumps(data_dict).encode())
            with pytest.raises(VerifyError) as err:
                site.content_manager.verifyFile(inner_path, data, ignore_same=False)
            assert "Invalid relative path" in str(err.value)

    @pytest.mark.parametrize("key", ["ignore", "optional"])
    def testSignUnsafePattern(self, site, key):
        site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*"
        with pytest.raises(UnsafePatternError) as err:
            site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False)
        assert "Potentially unsafe" in str(err.value)

    def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib):
        site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*"
        with pytest.raises(UnsafePatternError) as err:
            with site.storage.open("data/test_include/content.json") as data:
                site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False)
        assert "Potentially unsafe" in str(err.value)

        site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0}
        with pytest.raises(UnsafePatternError) as err:
            with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data:
                site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False)
        assert "Potentially unsafe" in str(err.value)

    def testPathValidation(self, site):
        assert site.content_manager.isValidRelativePath("test.txt")
        assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt")
        assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt")
        assert site.content_manager.isValidRelativePath("тест.текст")
        assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆")

        # Rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names

        assert not site.content_manager.isValidRelativePath("any\\hello.txt")  # \ not allowed
        assert not site.content_manager.isValidRelativePath("/hello.txt")  # Cannot start with /
        assert not site.content_manager.isValidRelativePath("\\hello.txt")  # Cannot start with \
        assert not site.content_manager.isValidRelativePath("../hello.txt")  # .. not allowed in path
        assert not site.content_manager.isValidRelativePath("\0hello.txt")  # NULL character
        assert not site.content_manager.isValidRelativePath("\31hello.txt")  # ASCII control characters (0-31)
        assert not site.content_manager.isValidRelativePath("any/hello.txt ")  # Cannot end with space
        assert not site.content_manager.isValidRelativePath("any/hello.txt.")  # Cannot end with dot
        assert site.content_manager.isValidRelativePath(".hello.txt")  # Can start with dot
        assert not site.content_manager.isValidRelativePath("any/CON")  # Protected names on Windows
        assert not site.content_manager.isValidRelativePath("CON/any.txt")
        assert not site.content_manager.isValidRelativePath("any/lpt1.txt")
        assert site.content_manager.isValidRelativePath("any/CONAN")
        assert not site.content_manager.isValidRelativePath("any/CONOUT$")
        assert not site.content_manager.isValidRelativePath("a" * 256)  # Max 255 characters allowed
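
A hedged sketch of a validator that would satisfy testPathValidation above; the exact regexes are assumptions for illustration, not the actual ContentManager code:

import re

def isValidRelativePath(relative_path):
    if ".." in relative_path.replace("\\", "/").split("/"):
        return False  # No directory traversal
    elif relative_path[:1] in ("/", "\\"):
        return False  # Cannot start with / or \
    elif len(relative_path) > 255:
        return False  # Max 255 characters
    else:
        # No \ or control/invalid characters, no Windows-protected device
        # names, and no path segment that ends with a dot or a space
        return (
            re.match(r'^[^\x00-\x1f"*:<>?\\|]+$', relative_path) is not None and
            not re.search(r"(^|/)(CONIN\$|CONOUT\$|CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])(\.|/|$)", relative_path, re.IGNORECASE) and
            not re.search(r"[. ](/|$)", relative_path)
        )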

src/Test/TestContentUser.py (new file, 390 lines)

@@ -0,0 +1,390 @@
import json
import io

import pytest

from Crypt import CryptBitcoin
from Content.ContentManager import VerifyError, SignError


@pytest.mark.usefixtures("resetSettings")
class TestContentUser:
    def testSigners(self, site):
        # File info for a not yet existing user file
        file_info = site.content_manager.getFileInfo("data/users/notexist/data.json")
        assert file_info["content_inner_path"] == "data/users/notexist/content.json"
        file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json")
        assert file_info["content_inner_path"] == "data/users/notexist/content.json"
        valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json")
        assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]

        # File info for an existing user file
        valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
        assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers  # The site address
        assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers  # Admin user defined in data/users/content.json
        assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers  # The user itself
        assert len(valid_signers) == 3  # No more valid signers

        # Valid signers for a banned user
        user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")
        user_content["cert_user_id"] = "bad@zeroid.bit"
        valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers  # The site address
        assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers  # Admin user defined in data/users/content.json
        assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers  # The user itself

    def testRules(self, site):
        # In this test we manipulate the rules defined in data/users/content.json
        user_content = site.storage.loadJson("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json")

        # Known user
        user_content["cert_auth_type"] = "web"
        user_content["cert_user_id"] = "nofish@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["max_size"] == 100000
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]

        # Unknown user
        user_content["cert_auth_type"] = "web"
        user_content["cert_user_id"] = "noone@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["max_size"] == 10000
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]

        # User with a larger size limit based on auth type
        user_content["cert_auth_type"] = "bitmsg"
        user_content["cert_user_id"] = "noone@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["max_size"] == 15000
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" in rules["signers"]

        # Banned user
        user_content["cert_auth_type"] = "web"
        user_content["cert_user_id"] = "bad@zeroid.bit"
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"]

    def testRulesAddress(self, site):
        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
        user_content = site.storage.loadJson(user_inner_path)

        rules = site.content_manager.getRules(user_inner_path, user_content)
        assert rules["max_size"] == 10000
        assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"]

        users_content = site.content_manager.contents["data/users/content.json"]

        # Ban user based on address
        users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False
        rules = site.content_manager.getRules(user_inner_path, user_content)
        assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"]

        # Change max allowed size
        users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
        rules = site.content_manager.getRules(user_inner_path, user_content)
        assert rules["max_size"] == 20000

    def testVerifyAddress(self, site):
        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
        data_dict = site.storage.loadJson(user_inner_path)
        users_content = site.content_manager.contents["data/users/content.json"]

        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

        # Test error on 15k data.json
        data_dict["files"]["data.json"]["size"] = 1024 * 15
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Include too large" in str(err.value)

        # Give more space based on address
        users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000}
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

    def testVerify(self, site):
        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json"
        data_dict = site.storage.loadJson(user_inner_path)
        users_content = site.content_manager.contents["data/users/content.json"]

        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

        # Test max size exception by setting the allowed size to 0
        rules = site.content_manager.getRules(user_inner_path, data_dict)
        assert rules["max_size"] == 10000
        assert users_content["user_contents"]["permission_rules"][".*"]["max_size"] == 10000

        users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0
        rules = site.content_manager.getRules(user_inner_path, data_dict)
        assert rules["max_size"] == 0
        data = io.BytesIO(json.dumps(data_dict).encode())

        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Include too large" in str(err.value)
        users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000  # Reset

        # Test max optional size exception
        # 1 MB gif = Allowed
        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)

        # 100 MB gif = Not allowed
        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 100 * 1024 * 1024
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Include optional files too large" in str(err.value)
        data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024  # Reset

        # hello.exe = Not allowed
        data_dict["files_optional"]["hello.exe"] = data_dict["files_optional"]["peanut-butter-jelly-time.gif"]
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Optional file not allowed" in str(err.value)
        del data_dict["files_optional"]["hello.exe"]  # Reset

        # Includes are not allowed in user content
        data_dict["includes"] = {"other.json": {}}
        del data_dict["signs"]  # Remove signs before signing
        data_dict["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey)
        }
        data = io.BytesIO(json.dumps(data_dict).encode())
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(user_inner_path, data, ignore_same=False)
        assert "Includes not allowed" in str(err.value)

    def testCert(self, site):
        # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"
        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
        # cert_addr = "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"

        # Check if the user file is loaded
        assert "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" in site.content_manager.contents
        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
        rules_content = site.content_manager.contents["data/users/content.json"]

        # Override valid cert signers for the test
        rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
            "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
            "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
        ]

        # Check valid cert signers
        rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)
        assert rules["cert_signers"] == {"zeroid.bit": [
            "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
            "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
        ]}

        # Sign a valid cert
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            user_content["cert_user_id"].split("@")[0]
        ), cert_priv)

        # Verify cert
        assert site.content_manager.verifyCert("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content)

        # Verify that the cert is not valid for another address
        assert not site.content_manager.verifyCert("data/users/badaddress/content.json", user_content)

        # Sign user content
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )

        # Test user cert
        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
        )

        # Test banned user
        cert_user_id = user_content["cert_user_id"]  # My username
        site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Valid signs: 0/1" in str(err.value)
        del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id]  # Reset

        # Test invalid cert
        user_content["cert_sign"] = CryptBitcoin.sign(
            "badaddress#%s/%s" % (user_content["cert_auth_type"], user_content["cert_user_id"]), cert_priv
        )
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid cert" in str(err.value)

        # Test banned user, signed by the site owner
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            user_content["cert_user_id"].split("@")[0]
        ), cert_priv)
        cert_user_id = user_content["cert_user_id"]  # My username
        site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False

        site_privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        del user_content["signs"]  # Remove signs before signing
        user_content["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), site_privatekey)
        }
        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(user_content).encode()), ignore_same=False
        )

    def testMissingCert(self, site):
        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"

        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
        rules_content = site.content_manager.contents["data/users/content.json"]

        # Override valid cert signers for the test
        rules_content["user_contents"]["cert_signers"]["zeroid.bit"] = [
            "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet",
            "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz"
        ]

        # Sign a valid cert
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            user_content["cert_user_id"].split("@")[0]
        ), cert_priv)
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )

        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
        )

        # Test invalid cert_user_id
        user_content["cert_user_id"] = "nodomain"
        user_content["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
        }
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid domain in cert_user_id" in str(err.value)

        # Test removed cert
        del user_content["cert_user_id"]
        del user_content["cert_auth_type"]
        del user_content["signs"]  # Remove signs before signing
        user_content["signs"] = {
            "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv)
        }
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )
        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Missing cert_user_id" in str(err.value)

    def testCertSignersPattern(self, site):
        user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A"
        cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA"  # For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet

        user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
        rules_content = site.content_manager.contents["data/users/content.json"]

        # Override the valid cert signers pattern for the test
        rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]"

        # Sign a valid cert
        user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet"
        user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % (
            user_content["cert_auth_type"],
            "certuser"
        ), cert_priv)
        signed_content = site.content_manager.sign(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False
        )

        assert site.content_manager.verifyFile(
            "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
            io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
        )

        # Cert does not match the pattern
        rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]"

        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)

        # Removed cert_signers_pattern
        del rules_content["user_contents"]["cert_signers_pattern"]

        with pytest.raises(VerifyError) as err:
            site.content_manager.verifyFile(
                "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
                io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False
            )
        assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value)

    def testNewFile(self, site):
        privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv"  # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT
        inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"

        site.storage.writeJson(inner_path, {"test": "data"})
        site.content_manager.sign(inner_path, privatekey)
        assert "test" in site.storage.loadJson(inner_path)

        site.storage.delete(inner_path)
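
The cert scheme exercised above ties a signature to the string "<user address>#<auth_type>/<username>". A hedged sketch of the verification step, inferred from these tests (the real ContentManager.verifyCert may differ):

from Crypt import CryptBitcoin

def verifyCert(user_address, user_content, cert_signers):
    # The signed payload binds the cert to the user's directory address,
    # the auth type, and the bare username (the part before the @)
    name, _, domain = user_content["cert_user_id"].partition("@")
    cert_subject = "%s#%s/%s" % (user_address, user_content["cert_auth_type"], name)
    # Any address listed for the cert domain may issue the cert
    for signer_address in cert_signers.get(domain, []):
        if CryptBitcoin.verify(cert_subject, signer_address, user_content["cert_sign"]):
            return True
    return False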

src/Test/TestCryptBitcoin.py (new file, 48 lines)

@@ -0,0 +1,48 @@
from Crypt import CryptBitcoin


class TestCryptBitcoin:
    def testSign(self, crypt_bitcoin_lib):
        privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C"
        privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC"

        # Get address by privatekey
        address = crypt_bitcoin_lib.privatekeyToAddress(privatekey)
        assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"

        address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad)
        assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz"

        # Text signing
        data_len_list = list(range(0, 300, 10))
        data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048]
        for data_len in data_len_list:
            data = data_len * "!"
            sign = crypt_bitcoin_lib.sign(data, privatekey)

            assert crypt_bitcoin_lib.verify(data, address, sign)
            assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign)

        # Signed by bad privatekey
        sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad)
        assert not crypt_bitcoin_lib.verify("hello", address, sign_bad)

    def testVerify(self, crypt_bitcoin_lib):
        sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
        assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed)

        sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0='
        assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed)

    def testNewPrivatekey(self):
        assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey()
        assert CryptBitcoin.privatekeyToAddress(CryptBitcoin.newPrivatekey())

    def testNewSeed(self):
        assert CryptBitcoin.newSeed() != CryptBitcoin.newSeed()
        assert CryptBitcoin.privatekeyToAddress(
            CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 0)
        )
        assert CryptBitcoin.privatekeyToAddress(
            CryptBitcoin.hdPrivatekey(CryptBitcoin.newSeed(), 2 ** 256)
        )

src/Test/TestCryptConnection.py (new file, 23 lines)

@@ -0,0 +1,23 @@
import os

from Config import config
from Crypt import CryptConnection


class TestCryptConnection:
    def testSslCert(self):
        # Remove old certs
        if os.path.isfile("%s/cert-rsa.pem" % config.data_dir):
            os.unlink("%s/cert-rsa.pem" % config.data_dir)
        if os.path.isfile("%s/key-rsa.pem" % config.data_dir):
            os.unlink("%s/key-rsa.pem" % config.data_dir)

        # Generate certs
        CryptConnection.manager.loadCerts()

        assert "tls-rsa" in CryptConnection.manager.crypt_supported
        assert CryptConnection.manager.selectCrypt(["tls-rsa", "unknown"]) == "tls-rsa"  # It should choose the known crypt

        # Check openssl cert generation
        assert os.path.isfile("%s/cert-rsa.pem" % config.data_dir)
        assert os.path.isfile("%s/key-rsa.pem" % config.data_dir)

src/Test/TestCryptHash.py (new file, 31 lines)

@@ -0,0 +1,31 @@
import base64

from Crypt import CryptHash

sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d"
sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d"
sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3"


class TestCryptHash:
    def testSha(self, site):
        file_path = site.storage.getPath("dbschema.json")
        assert CryptHash.sha512sum(file_path) == sha512t_sum_hex
        assert CryptHash.sha512sum(open(file_path, "rb")) == sha512t_sum_hex
        assert CryptHash.sha512sum(open(file_path, "rb"), format="digest") == sha512t_sum_bin

        assert CryptHash.sha256sum(file_path) == sha256_sum_hex
        assert CryptHash.sha256sum(open(file_path, "rb")) == sha256_sum_hex

        with open(file_path, "rb") as f:
            hash = CryptHash.Sha512t(f.read(100))
            assert hash.hexdigest() != sha512t_sum_hex  # A partial file gives a different hash
            hash.update(f.read(1024 * 1024))
            assert hash.hexdigest() == sha512t_sum_hex

    def testRandom(self):
        assert len(CryptHash.random(64)) == 64
        assert CryptHash.random() != CryptHash.random()
        assert bytes.fromhex(CryptHash.random(encoding="hex"))
        assert base64.b64decode(CryptHash.random(encoding="base64"))

src/Test/TestDb.py (new file, 137 lines)

@@ -0,0 +1,137 @@
import io


class TestDb:
    def testCheckTables(self, db):
        tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
        assert "keyvalue" in tables  # To store simple key -> value
        assert "json" in tables  # Json file path registry
        assert "test" in tables  # The table defined in dbschema.json

        # Verify test table
        cols = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
        assert "test_id" in cols
        assert "title" in cols

        # Add new table
        assert "newtest" not in tables
        db.schema["tables"]["newtest"] = {
            "cols": [
                ["newtest_id", "INTEGER"],
                ["newtitle", "TEXT"],
            ],
            "indexes": ["CREATE UNIQUE INDEX newtest_id ON newtest(newtest_id)"],
            "schema_changed": 1426195822
        }
        db.checkTables()
        tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
        assert "test" in tables
        assert "newtest" in tables

    def testQueries(self, db):
        # Test insert
        for i in range(100):
            db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test #%s" % i})

        assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 100

        # Test single select
        assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1

        # Test multiple select
        assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"test_id": [1, 2, 3], "title": "Test #2"}
        ).fetchone()["num"] == 1
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
        ).fetchone()["num"] == 2

        # Test multiple select using named params
        assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title",
            {"test_id": [1, 2, 3], "title": "Test #2"}
        ).fetchone()["num"] == 1
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title IN :title",
            {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]}
        ).fetchone()["num"] == 2

        # Large amount of IN values
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"not__test_id": list(range(2, 3000))}
        ).fetchone()["num"] == 2
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"test_id": list(range(50, 3000))}
        ).fetchone()["num"] == 50
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"not__title": ["Test #%s" % i for i in range(50, 3000)]}
        ).fetchone()["num"] == 50
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"title__like": "%20%"}
        ).fetchone()["num"] == 1

        # Test named parameter escaping
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike",
            {"test_id": 1, "titlelike": "Test%"}
        ).fetchone()["num"] == 1

    def testEscaping(self, db):
        # Test insert
        for i in range(100):
            db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i})

        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"title": "Test '\" #1"}
        ).fetchone()["num"] == 1
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"title": ["Test '\" #%s" % i for i in range(0, 50)]}
        ).fetchone()["num"] == 50
        assert db.execute(
            "SELECT COUNT(*) AS num FROM test WHERE ?",
            {"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]}
        ).fetchone()["num"] == 50

    def testUpdateJson(self, db):
        f = io.BytesIO()
        f.write("""
            {
                "test": [
                    {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
                ]
            }
        """.encode())
        f.seek(0)
        assert db.updateJson(db.db_dir + "data.json", f) is True
        assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1
        assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1

    def testUnsafePattern(self, db):
        db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]}  # Only repetition of . is supported
        f = io.StringIO()
        f.write("""
            {
                "test": [
                    {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"}
                ]
            }
        """)
        f.seek(0)
        assert db.updateJson(db.db_dir + "data.json", f) is False
        assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0
        assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0
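
The "WHERE ?" form used above expands a dict into SQL with bound parameters. A simplified, hedged sketch of that expansion (the real Db.py also supports the "not__" and "__like" prefixes seen in the tests, and splits very long IN lists):

def dictToWhere(params):
    # Build e.g. "test_id IN (:test_id_0, :test_id_1) AND title = :title"
    # plus a flat bind dict, so values are never spliced into the SQL text
    wheres = []
    binds = {}
    for key, value in params.items():
        if isinstance(value, list):
            names = []
            for i, item in enumerate(value):
                bind_name = "%s_%s" % (key, i)
                binds[bind_name] = item
                names.append(":" + bind_name)
            wheres.append("%s IN (%s)" % (key, ", ".join(names)))
        else:
            binds[key] = value
            wheres.append("%s = :%s" % (key, key))
    return " AND ".join(wheres), binds

# Usage: where, binds = dictToWhere({"test_id": [1, 2, 3], "title": "Test #2"})
# -> "test_id IN (:test_id_0, :test_id_1, :test_id_2) AND title = :title"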

src/Test/TestDbQuery.py (new file, 31 lines)

@@ -0,0 +1,31 @@
import re

from Db.DbQuery import DbQuery


class TestDbQuery:
    def testParse(self):
        query_text = """
            SELECT
                'comment' AS type,
                date_added, post.title AS title,
                keyvalue.value || ': ' || comment.body AS body,
                '?Post:' || comment.post_id || '#Comments' AS url
            FROM
                comment
            LEFT JOIN json USING (json_id)
            LEFT JOIN json AS json_content ON (json_content.directory = json.directory AND json_content.file_name='content.json')
            LEFT JOIN keyvalue ON (keyvalue.json_id = json_content.json_id AND key = 'cert_user_id')
            LEFT JOIN post ON (comment.post_id = post.post_id)
            WHERE
                post.date_added > 123
            ORDER BY
                date_added DESC
            LIMIT 20
        """
        query = DbQuery(query_text)
        assert query.parts["LIMIT"] == "20"
        assert query.fields["body"] == "keyvalue.value || ': ' || comment.body"
        assert re.sub("[ \r\n]", "", str(query)) == re.sub("[ \r\n]", "", query_text)
        query.wheres.append("body LIKE '%hello%'")
        assert "body LIKE '%hello%'" in str(query)

src/Test/TestDebug.py (new file, 52 lines)

@@ -0,0 +1,52 @@
from Debug import Debug
import gevent
import os
import re

import pytest


class TestDebug:
    @pytest.mark.parametrize("items,expected", [
        (["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]),  # basic test
        (["@/src/Db/Db.py:17"], ["Db.py line 17"]),  # path compression
        (["%s:1" % __file__], ["TestDebug.py line 1"]),
        (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]),  # plugins
        (["@/main.py:17"], ["main.py line 17"]),  # root
        (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]),  # Windows paths
        (["<frozen importlib._bootstrap>:1"], []),  # importlib builtins
        (["<frozen importlib._bootstrap_external>:1"], []),  # importlib builtins
        (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]),  # best-effort anonymization
        (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]),
        (["/root/main.py:17"], ["/root/main.py line 17"]),
        (["{gevent}:13"], ["<gevent>/__init__.py line 13"]),  # modules
        (["{os}:13"], ["<os> line 13"]),  # python builtin modules
        (["src/gevent/event.py:17"], ["<gevent>/event.py line 17"]),  # gevent-overridden __file__
        (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]),  # multiple args
        (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]),  # same file
        (["{os}:1", "@/src/Db/Db.py:17"], ["<os> line 1", "Db.py line 17"]),  # builtins
        (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["<gevent>/__init__.py line 1", "...", "Db.py line 17"])
    ])
    def testFormatTraceback(self, items, expected):
        q_items = []
        for item in items:
            file, line = item.rsplit(":", 1)
            if file.startswith("@"):
                file = Debug.root_dir + file[1:]
            file = file.replace("{os}", os.__file__)
            file = file.replace("{gevent}", gevent.__file__)
            q_items.append((file, int(line)))
        assert Debug.formatTraceback(q_items) == expected

    def testFormatException(self):
        try:
            raise ValueError("Test exception")
        except Exception:
            assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException())
        try:
            os.path.abspath(1)
        except Exception:
            assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException())

    def testFormatStack(self):
        assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack())

src/Test/TestDiff.py (new file, 58 lines)

@@ -0,0 +1,58 @@
import io

from util import Diff


class TestDiff:
    def testDiff(self):
        assert Diff.diff(
            [],
            ["one", "two", "three"]
        ) == [("+", ["one", "two", "three"])]

        assert Diff.diff(
            ["one", "two", "three"],
            ["one", "two", "three", "four", "five"]
        ) == [("=", 11), ("+", ["four", "five"])]

        assert Diff.diff(
            ["one", "two", "three", "six"],
            ["one", "two", "three", "four", "five", "six"]
        ) == [("=", 11), ("+", ["four", "five"]), ("=", 3)]

        assert Diff.diff(
            ["one", "two", "three", "hmm", "six"],
            ["one", "two", "three", "four", "five", "six"]
        ) == [("=", 11), ("-", 3), ("+", ["four", "five"]), ("=", 3)]

        assert Diff.diff(
            ["one", "two", "three"],
            []
        ) == [("-", 11)]

    def testUtf8(self):
        assert Diff.diff(
            ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"],
            ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"]
        ) == [("=", 20), ("+", ["four", "five"])]

    def testDiffLimit(self):
        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
        actions = Diff.diff(list(old_f), list(new_f), limit=1024)
        assert actions

        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix" * 1024)
        actions = Diff.diff(list(old_f), list(new_f), limit=1024)
        assert actions is False

    def testPatch(self):
        old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix")
        new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix")
        actions = Diff.diff(
            list(old_f),
            list(new_f)
        )
        old_f.seek(0)
        assert Diff.patch(old_f, actions).getvalue() == new_f.getvalue()
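
The action format is (op, param): ("=", n) keeps lines, ("-", n) drops lines, ("+", lines) inserts new lines; judging by the expected values in testDiff, n counts the bytes of the affected lines without their trailing newlines. A hedged patch() sketch consistent with testPatch (the real util.Diff may differ):

import io

def patch(old_f, actions):
    new_f = io.BytesIO()
    for action, param in actions:
        if action == "=":  # Copy unchanged lines until `param` stripped bytes are consumed
            copied = 0
            while copied < param:
                line = old_f.readline()
                new_f.write(line)
                copied += len(line.strip(b"\r\n"))
        elif action == "-":  # Skip (delete) lines the same way
            removed = 0
            while removed < param:
                removed += len(old_f.readline().strip(b"\r\n"))
        elif action == "+":  # Insert the new lines
            for add_line in param:
                new_f.write(add_line)
    return new_f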

src/Test/TestEvent.py (new file, 65 lines)

@@ -0,0 +1,65 @@
import util
class ExampleClass(object):
def __init__(self):
self.called = []
self.onChanged = util.Event()
def increment(self, title):
self.called.append(title)
class TestEvent:
def testEvent(self):
test_obj = ExampleClass()
test_obj.onChanged.append(lambda: test_obj.increment("Called #1"))
test_obj.onChanged.append(lambda: test_obj.increment("Called #2"))
test_obj.onChanged.once(lambda: test_obj.increment("Once"))
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Called #1", "Called #2", "Once"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Called #1", "Called #2", "Once", "Called #1", "Called #2", "Called #1", "Called #2"]
def testOnce(self):
test_obj = ExampleClass()
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1"))
# It should be called only once
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Once test #1"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Once test #1"]
def testOnceMultiple(self):
test_obj = ExampleClass()
# Allow queue more than once
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1"))
test_obj.onChanged.once(lambda: test_obj.increment("Once test #2"))
test_obj.onChanged.once(lambda: test_obj.increment("Once test #3"))
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Once test #1", "Once test #2", "Once test #3"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Once test #1", "Once test #2", "Once test #3"]
def testOnceNamed(self):
test_obj = ExampleClass()
        # Don't store more than one handler of the same named type
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1/1"), "type 1")
test_obj.onChanged.once(lambda: test_obj.increment("Once test #1/2"), "type 1")
test_obj.onChanged.once(lambda: test_obj.increment("Once test #2"), "type 2")
assert test_obj.called == []
test_obj.onChanged()
assert test_obj.called == ["Once test #1/1", "Once test #2"]
test_obj.onChanged()
test_obj.onChanged()
assert test_obj.called == ["Once test #1/1", "Once test #2"]

124
src/Test/TestFileRequest.py Normal file
View File

@ -0,0 +1,124 @@
import io
import pytest
import time
from Connection import ConnectionServer
from Connection import Connection
from File import FileServer
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestFileRequest:
def testGetFile(self, file_server, site):
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
file_server.sites[site.address] = site
# Normal request
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
assert b"sign" in response["body"]
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")})
assert b"sign" in response["body"]
# Invalid file
response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
assert "File read error" in response["error"]
# Location over size
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
assert "File read error" in response["error"]
# Stream from parent dir
response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
assert "File read exception" in response["error"]
# Invalid site
response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
assert "Unknown site" in response["error"]
response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
assert "Unknown site" in response["error"]
# Invalid size
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234})
assert "File size does not match" in response["error"]
# Invalid path
for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]:
for sep in ["/", "\\"]:
response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0})
assert response["error"] == 'File read exception'
connection.close()
client.stop()
def testStreamFile(self, file_server, site):
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer(file_server.ip, 1545)
connection = client.getConnection(file_server.ip, 1544)
file_server.sites[site.address] = site
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
assert "stream_bytes" in response
assert b"sign" in buff.getvalue()
# Invalid file
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
assert "File read error" in response["error"]
# Location over size
buff = io.BytesIO()
response = connection.request(
"streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
)
assert "File read error" in response["error"]
# Stream from parent dir
buff = io.BytesIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
assert "File read exception" in response["error"]
connection.close()
client.stop()
def testPex(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add new fake peer to site
fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True)
# Add fake connection to it
fake_peer.connection = Connection(file_server, file_server.ip_external, 11337)
fake_peer.connection.last_recv_time = time.time()
assert fake_peer in site.getConnectablePeers()
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert "%s:11337" % file_server.ip_external not in site_temp.peers
assert peer_file_server.pex()
assert "%s:11337" % file_server.ip_external in site_temp.peers
# Should not exchange private peers from local network
fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True)
assert fake_peer_private not in site.getConnectablePeers(allow_private=False)
fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337)
fake_peer_private.connection.last_recv_time = time.time()
assert "192.168.0.1:11337" not in site_temp.peers
assert not peer_file_server.pex()
assert "192.168.0.1:11337" not in site_temp.peers
connection.close()
client.stop()
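# Illustrative sketch (not part of the original suite) of the request shape the
# assertions above rely on: a getFile request names the site, the inner path
# and a byte offset, and the peer answers with a dict holding either "body"
# (the bytes read from that offset) or an "error" string.
def exampleReadContentJson(connection, site_address):
    response = connection.request(
        "getFile", {"site": site_address, "inner_path": "content.json", "location": 0}
    )
    # On success "body" is present; on failure only "error" is
    return response.get("body", b"")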

39
src/Test/TestFlag.py Normal file

@ -0,0 +1,39 @@
import os
import pytest
from util.Flag import Flag
class TestFlag:
def testFlagging(self):
flag = Flag()
@flag.admin
@flag.no_multiuser
def testFn(anything):
return anything
assert "admin" in flag.db["testFn"]
assert "no_multiuser" in flag.db["testFn"]
def testSubclassedFlagging(self):
flag = Flag()
class Test:
@flag.admin
@flag.no_multiuser
def testFn(anything):
return anything
class SubTest(Test):
pass
assert "admin" in flag.db["testFn"]
assert "no_multiuser" in flag.db["testFn"]
def testInvalidFlag(self):
flag = Flag()
with pytest.raises(Exception) as err:
@flag.no_multiuser
@flag.unknown_flag
def testFn(anything):
return anything
assert "Invalid flag" in str(err.value)

79
src/Test/TestHelper.py Normal file

@ -0,0 +1,79 @@
import socket
import struct
import os
import pytest
from util import helper
from Config import config
@pytest.mark.usefixtures("resetSettings")
class TestHelper:
def testShellquote(self):
assert helper.shellquote("hel'lo") == "\"hel'lo\"" # Allow '
assert helper.shellquote('hel"lo') == '"hello"' # Remove "
assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"')
def testPackAddress(self):
for port in [1, 1000, 65535]:
for ip in ["1.1.1.1", "127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]:
assert len(helper.packAddress(ip, port)) == 6
assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port)
for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]:
assert len(helper.packAddress(ip, port)) == 18
assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port)
assert len(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == 12
assert helper.unpackOnionAddress(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == ("boot3rdez4rzn36x.onion", port)
with pytest.raises(struct.error):
helper.packAddress("1.1.1.1", 100000)
with pytest.raises(socket.error):
helper.packAddress("999.1.1.1", 1)
with pytest.raises(Exception):
helper.unpackAddress("X")
def testGetDirname(self):
assert helper.getDirname("data/users/content.json") == "data/users/"
assert helper.getDirname("data/users") == "data/"
assert helper.getDirname("") == ""
assert helper.getDirname("content.json") == ""
assert helper.getDirname("data/users/") == "data/users/"
assert helper.getDirname("/data/users/content.json") == "data/users/"
def testGetFilename(self):
assert helper.getFilename("data/users/content.json") == "content.json"
assert helper.getFilename("data/users") == "users"
assert helper.getFilename("") == ""
assert helper.getFilename("content.json") == "content.json"
assert helper.getFilename("data/users/") == ""
assert helper.getFilename("/data/users/content.json") == "content.json"
def testIsIp(self):
assert helper.isIp("1.2.3.4")
assert helper.isIp("255.255.255.255")
assert not helper.isIp("any.host")
assert not helper.isIp("1.2.3.4.com")
assert not helper.isIp("1.2.3.4.any.host")
def testIsPrivateIp(self):
assert helper.isPrivateIp("192.168.1.1")
assert not helper.isPrivateIp("1.1.1.1")
assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c")
assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns
def testOpenLocked(self):
locked_f = helper.openLocked(config.data_dir + "/locked.file")
assert locked_f
with pytest.raises(BlockingIOError):
locked_f_again = helper.openLocked(config.data_dir + "/locked.file")
locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file")
locked_f.close()
locked_f_different.close()
os.unlink(locked_f.name)
os.unlink(locked_f_different.name)
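# Illustrative sketch (not part of the original suite): packAddress() packs an
# IPv4 address into 4 address bytes + 2 port bytes and an IPv6 address into
# 16 + 2 bytes, which is why the assertions above expect lengths 6 and 18.
def examplePackedAddress():
    packed = helper.packAddress("127.0.0.1", 1544)
    assert len(packed) == 6
    # unpackAddress() is the exact inverse
    assert helper.unpackAddress(packed) == ("127.0.0.1", 1544)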

88
src/Test/TestMsgpack.py Normal file

@ -0,0 +1,88 @@
import io
import os
import msgpack
import pytest
from Config import config
from util import Msgpack
from collections import OrderedDict
class TestMsgpack:
test_data = OrderedDict(
sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items())
)
def testPacking(self):
assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'
    def testUnpacking(self):
assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data
@pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker])
def testUnpacker(self, unpacker_class):
unpacker = unpacker_class(raw=False)
data = msgpack.packb(self.test_data, use_bin_type=True)
data += msgpack.packb(self.test_data, use_bin_type=True)
messages = []
for char in data:
unpacker.feed(bytes([char]))
for message in unpacker:
messages.append(message)
assert len(messages) == 2
assert messages[0] == self.test_data
assert messages[0] == messages[1]
def testStreaming(self):
bin_data = os.urandom(20)
f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb")
f.read_bytes = 30
data = {"cmd": "response", "body": f, "bin": bin_data}
out_buff = io.BytesIO()
Msgpack.stream(data, out_buff.write)
out_buff.seek(0)
data_packb = {
"cmd": "response",
"body": open("%s/users.json" % config.data_dir, "rb").read(30),
"bin": bin_data
}
out_buff.seek(0)
data_unpacked = Msgpack.unpack(out_buff.read())
assert data_unpacked == data_packb
assert data_unpacked["cmd"] == "response"
assert type(data_unpacked["body"]) == bytes
def testBackwardCompatibility(self):
packed = {}
packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False)
packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True)
for key, val in packed.items():
unpacked = Msgpack.unpack(val)
type(unpacked["utf8"]) == str
type(unpacked["bin"]) == bytes
# Packed with use_bin_type=False (pre-ZeroNet 0.7.0)
unpacked = Msgpack.unpack(packed["py3"], decode=True)
type(unpacked["utf8"]) == str
type(unpacked["bin"]) == bytes
assert len(unpacked["utf8"]) == 9
assert len(unpacked["bin"]) == 10
with pytest.raises(UnicodeDecodeError) as err: # Try to decode binary as utf-8
unpacked = Msgpack.unpack(packed["py3"], decode=False)
# Packed with use_bin_type=True
unpacked = Msgpack.unpack(packed["py3_bin"], decode=False)
type(unpacked["utf8"]) == str
type(unpacked["bin"]) == bytes
assert len(unpacked["utf8"]) == 9
assert len(unpacked["bin"]) == 10

167
src/Test/TestNoparallel.py Normal file

@ -0,0 +1,167 @@
import time
import gevent
import pytest
import util
from util import ThreadPool
@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn'])
def queue_spawn(request):
thread_pool = ThreadPool.ThreadPool(10)
if request.param == "gevent.spawn":
return gevent.spawn
else:
return thread_pool.spawn
class ExampleClass(object):
def __init__(self):
self.counted = 0
@util.Noparallel()
def countBlocking(self, num=5):
for i in range(1, num + 1):
time.sleep(0.1)
self.counted += 1
return "counted:%s" % i
@util.Noparallel(queue=True, ignore_class=True)
def countQueue(self, num=5):
for i in range(1, num + 1):
time.sleep(0.1)
self.counted += 1
return "counted:%s" % i
@util.Noparallel(blocking=False)
def countNoblocking(self, num=5):
for i in range(1, num + 1):
time.sleep(0.01)
self.counted += 1
return "counted:%s" % i
class TestNoparallel:
def testBlocking(self, queue_spawn):
obj1 = ExampleClass()
obj2 = ExampleClass()
        # Parallel calls are not allowed while it's running; they block until it finishes
threads = [
queue_spawn(obj1.countBlocking),
queue_spawn(obj1.countBlocking),
queue_spawn(obj1.countBlocking),
queue_spawn(obj2.countBlocking)
]
        assert obj2.countBlocking() == "counted:5"  # The call is ignored as obj2.countBlocking is already counting, but it blocks until that call finishes
gevent.joinall(threads)
assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"]
        obj2.countBlocking()  # Can be called again as the previous obj2.countBlocking call has finished
assert obj1.counted == 5
assert obj2.counted == 10
def testNoblocking(self):
obj1 = ExampleClass()
thread1 = obj1.countNoblocking()
thread2 = obj1.countNoblocking() # Ignored
assert obj1.counted == 0
time.sleep(0.1)
assert thread1.value == "counted:5"
assert thread2.value == "counted:5"
assert obj1.counted == 5
        obj1.countNoblocking().join()  # Allowed again; wait until it finishes
assert obj1.counted == 10
def testQueue(self, queue_spawn):
obj1 = ExampleClass()
queue_spawn(obj1.countQueue, num=1)
queue_spawn(obj1.countQueue, num=1)
queue_spawn(obj1.countQueue, num=1)
time.sleep(0.3)
        assert obj1.counted == 2  # Only one extra call gets queued (no multi-queue)
obj2 = ExampleClass()
queue_spawn(obj2.countQueue, num=10)
queue_spawn(obj2.countQueue, num=10)
time.sleep(1.5) # Call 1 finished, call 2 still working
assert 10 < obj2.counted < 20
queue_spawn(obj2.countQueue, num=10)
time.sleep(2.0)
assert obj2.counted == 30
def testQueueOverload(self):
obj1 = ExampleClass()
threads = []
for i in range(1000):
thread = gevent.spawn(obj1.countQueue, num=5)
threads.append(thread)
gevent.joinall(threads)
assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed)
def testIgnoreClass(self, queue_spawn):
obj1 = ExampleClass()
obj2 = ExampleClass()
threads = [
queue_spawn(obj1.countQueue),
queue_spawn(obj1.countQueue),
queue_spawn(obj1.countQueue),
queue_spawn(obj2.countQueue),
queue_spawn(obj2.countQueue)
]
s = time.time()
time.sleep(0.001)
gevent.joinall(threads)
        # Queue is limited to 2 calls (each call counts to 5, taking ~0.5 sec)
assert obj1.counted + obj2.counted == 10
taken = time.time() - s
assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s
def testException(self, queue_spawn):
class MyException(Exception):
pass
@util.Noparallel()
def raiseException():
raise MyException("Test error!")
with pytest.raises(MyException) as err:
raiseException()
assert str(err.value) == "Test error!"
with pytest.raises(MyException) as err:
queue_spawn(raiseException).get()
assert str(err.value) == "Test error!"
def testMultithreadMix(self, queue_spawn):
obj1 = ExampleClass()
with ThreadPool.ThreadPool(10) as thread_pool:
s = time.time()
t1 = queue_spawn(obj1.countBlocking, 5)
time.sleep(0.01)
t2 = thread_pool.spawn(obj1.countBlocking, 5)
time.sleep(0.01)
t3 = thread_pool.spawn(obj1.countBlocking, 5)
time.sleep(0.3)
t4 = gevent.spawn(obj1.countBlocking, 5)
threads = [t1, t2, t3, t4]
for thread in threads:
assert thread.get() == "counted:5"
time_taken = time.time() - s
assert obj1.counted == 5
assert 0.5 < time_taken < 0.7
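# Illustrative sketch (not part of the original suite) of the default
# blocking=True mode exercised above: overlapping callers don't start new
# executions, they wait for and share the running call's result.
def exampleSharedResult():
    obj = ExampleClass()
    threads = [gevent.spawn(obj.countBlocking) for i in range(3)]
    gevent.joinall(threads)
    # Three overlapping callers, but only one call actually counted
    assert obj.counted == 5
    assert [thread.value for thread in threads] == ["counted:5"] * 3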

159
src/Test/TestPeer.py Normal file

@ -0,0 +1,159 @@
import time
import io
import pytest
from File import FileServer
from File import FileRequest
from Crypt import CryptHash
from . import Spy
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestPeer:
def testPing(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert peer_file_server.ping() is not None
assert peer_file_server in site_temp.peers.values()
peer_file_server.remove()
assert peer_file_server not in site_temp.peers.values()
connection.close()
client.stop()
def testDownloadFile(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
connection = client.getConnection(file_server.ip, 1544)
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
# Testing streamFile
buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
assert b"sign" in buff.getvalue()
# Testing getFile
buff = peer_file_server.getFile(site_temp.address, "content.json")
assert b"sign" in buff.getvalue()
connection.close()
client.stop()
def testHashfield(self, site):
sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
# Check if hashfield has any files
assert site.content_manager.hashfield
assert len(site.content_manager.hashfield) > 0
        # Check an existing hash
assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield
# Add new hash
new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
assert site.content_manager.hashfield.appendHash(new_hash)
assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time
assert site.content_manager.hashfield.getHashId(new_hash) in site.content_manager.hashfield
# Remove new hash
assert site.content_manager.hashfield.removeHash(new_hash)
assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
def testHashfieldExchange(self, file_server, site, site_temp):
server1 = file_server
server1.sites[site.address] = site
site.connection_server = server1
server2 = FileServer(file_server.ip, 1545)
server2.sites[site_temp.address] = site_temp
site_temp.connection_server = server2
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
# Add file_server as peer to client
server2_peer1 = site_temp.addPeer(file_server.ip, 1544)
# Check if hashfield has any files
assert len(site.content_manager.hashfield) > 0
# Testing hashfield sync
assert len(server2_peer1.hashfield) == 0
assert server2_peer1.updateHashfield() # Query hashfield from peer
assert len(server2_peer1.hashfield) > 0
# Test force push new hashfield
site_temp.content_manager.hashfield.appendHash("AABB")
server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
with Spy.Spy(FileRequest, "route") as requests:
assert len(server1_peer2.hashfield) == 0
server2_peer1.sendMyHashfield()
assert len(server1_peer2.hashfield) == 1
server2_peer1.sendMyHashfield() # Hashfield not changed, should be ignored
assert len(requests) == 1
time.sleep(0.01) # To make hashfield change date different
site_temp.content_manager.hashfield.appendHash("AACC")
server2_peer1.sendMyHashfield() # Push hashfield
assert len(server1_peer2.hashfield) == 2
assert len(requests) == 2
site_temp.content_manager.hashfield.appendHash("AADD")
assert server1_peer2.updateHashfield(force=True) # Request hashfield
assert len(server1_peer2.hashfield) == 3
assert len(requests) == 3
assert not server2_peer1.sendMyHashfield() # Not changed, should be ignored
assert len(requests) == 3
server2.stop()
def testFindHash(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert peer_file_server.findHashIds([1234]) == {}
        # Add fake peers with the required hashes
fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
fake_peer_1.hashfield.append(1234)
fake_peer_2 = site.addPeer("1.2.3.5", 1545)
fake_peer_2.hashfield.append(1234)
fake_peer_2.hashfield.append(1235)
fake_peer_3 = site.addPeer("1.2.3.6", 1546)
fake_peer_3.hashfield.append(1235)
fake_peer_3.hashfield.append(1236)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)])
assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
        # Test that my own address gets added too
site.content_manager.hashfield.append(1234)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)])
assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
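# Illustrative sketch (not part of the original suite): findHashIds() maps each
# queried optional-file hash id to the (ip, port) pairs of peers believed to
# have it, which is how optional-file downloads locate their sources.
def exampleCountSources(peer, hash_ids):
    res = peer.findHashIds(hash_ids)
    # res looks like {hash_id: [(ip, port), ...]}; unknown ids have no entry
    return {hash_id: len(addresses) for hash_id, addresses in res.items()}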

100
src/Test/TestRateLimit.py Normal file

@ -0,0 +1,100 @@
import time
import gevent
from util import RateLimit
# Time is around limit +/- 0.05 sec
def around(t, limit):
    return limit - 0.05 <= t <= limit + 0.05
class ExampleClass(object):
def __init__(self):
self.counted = 0
self.last_called = None
def count(self, back="counted"):
self.counted += 1
self.last_called = back
return back
class TestRateLimit:
def testCall(self):
obj1 = ExampleClass()
obj2 = ExampleClass()
s = time.time()
assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
        assert around(time.time() - s, 0.0)  # The first call is allowed instantly
assert obj1.counted == 1
# Call again
assert not RateLimit.isAllowed("counting", 0.1)
assert RateLimit.isAllowed("something else", 0.1)
assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
assert around(time.time() - s, 0.1) # Delays second call within interval
assert obj1.counted == 2
time.sleep(0.1) # Wait the cooldown time
# Call 3 times async
s = time.time()
assert obj2.counted == 0
threads = [
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # Instant
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)), # 0.1s delay
gevent.spawn(lambda: RateLimit.call("counting", allowed_again=0.1, func=obj2.count)) # 0.2s delay
]
gevent.joinall(threads)
assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
assert around(time.time() - s, 0.2)
# Wait 0.1s cooldown
assert not RateLimit.isAllowed("counting", 0.1)
time.sleep(0.11)
assert RateLimit.isAllowed("counting", 0.1)
# No queue = instant again
s = time.time()
assert RateLimit.isAllowed("counting", 0.1)
assert RateLimit.call("counting", allowed_again=0.1, func=obj2.count) == "counted"
assert around(time.time() - s, 0.0)
assert obj2.counted == 4
def testCallAsync(self):
obj1 = ExampleClass()
obj2 = ExampleClass()
s = time.time()
RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #1").join()
assert obj1.counted == 1 # First instant
assert around(time.time() - s, 0.0)
        # After that, calls are delayed
s = time.time()
t1 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #2") # Dumped by the next call
time.sleep(0.03)
t2 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #3") # Dumped by the next call
time.sleep(0.03)
t3 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #4") # Will be called
assert obj1.counted == 1 # Delay still in progress: Not called yet
t3.join()
assert t3.value == "call #4"
assert around(time.time() - s, 0.1)
# Only the last one called
assert obj1.counted == 2
assert obj1.last_called == "call #4"
# Just called, not allowed again
assert not RateLimit.isAllowed("counting async", 0.1)
s = time.time()
t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
assert obj1.counted == 3
assert around(time.time() - s, 0.1)
assert not RateLimit.isAllowed("counting async", 0.1)
time.sleep(0.11)
assert RateLimit.isAllowed("counting async", 0.1)

24
src/Test/TestSafeRe.py Normal file

@ -0,0 +1,24 @@
from util import SafeRe
import pytest
class TestSafeRe:
def testSafeMatch(self):
assert SafeRe.match(
"((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)",
"js/ZeroTalk.coffee"
)
assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json")
@pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"])
def testUnsafeMatch(self, pattern):
with pytest.raises(SafeRe.UnsafePatternError) as err:
SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
assert "Potentially unsafe" in str(err.value)
@pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"])
def testUnsafeRepetition(self, pattern):
with pytest.raises(SafeRe.UnsafePatternError) as err:
SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
assert "More than" in str(err.value)

70
src/Test/TestSite.py Normal file

@ -0,0 +1,70 @@
import shutil
import os
import pytest
from Site import SiteManager
TEST_DATA_PATH = "src/Test/testdata"
@pytest.mark.usefixtures("resetSettings")
class TestSite:
def testClone(self, site):
assert site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
# Remove old files
if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"):
shutil.rmtree(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json")
        # Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL
new_site = site.clone(
"159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL", "5JU2p5h3R7B1WrbaEdEDNZR7YHqRLGcjNcqwqVQzX2H4SuNe2ee", address_index=1
)
# Check if clone was successful
assert new_site.address == "159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"
assert new_site.storage.isFile("content.json")
assert new_site.storage.isFile("index.html")
assert new_site.storage.isFile("data/users/content.json")
assert new_site.storage.isFile("data/zeroblog.db")
assert new_site.storage.verifyFiles()["bad_files"] == [] # No bad files allowed
assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog"
# Optional files should be removed
assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0
# Test re-cloning (updating)
# Changes in non-data files should be overwritten
new_site.storage.write("index.html", b"this will be overwritten")
assert new_site.storage.read("index.html") == b"this will be overwritten"
# Changes in data file should be kept after re-cloning
changed_contentjson = new_site.storage.loadJson("content.json")
changed_contentjson["description"] = "Update Description Test"
new_site.storage.writeJson("content.json", changed_contentjson)
changed_data = new_site.storage.loadJson("data/data.json")
changed_data["title"] = "UpdateTest"
new_site.storage.writeJson("data/data.json", changed_data)
# The update should be reflected to database
assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "UpdateTest"
# Re-clone the site
site.log.debug("Re-cloning")
site.clone("159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
assert new_site.storage.loadJson("data/data.json")["title"] == "UpdateTest"
assert new_site.storage.loadJson("content.json")["description"] == "Update Description Test"
assert new_site.storage.read("index.html") != "this will be overwritten"
# Delete created files
new_site.storage.deleteFiles()
assert not os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
# Delete from site registry
assert new_site.address in SiteManager.site_manager.sites
SiteManager.site_manager.delete(new_site.address)
assert new_site.address not in SiteManager.site_manager.sites
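# Illustrative sketch (not part of the original suite): re-cloning an already
# cloned address, as the test above does, refreshes code files such as
# index.html while data files and the site database keep user modifications.
def exampleReclone(site):
    # Same address, no private key: only the cloned files get updated
    site.clone("159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")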

562
src/Test/TestSiteDownload.py Normal file

@ -0,0 +1,562 @@
import time
import pytest
import mock
import gevent
import gevent.event
import os
from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site.Site import Site
from . import Spy
@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
def testRename(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.isFile("content.json")
# Rename non-optional file
os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
content = site.storage.loadJson("content.json")
assert "data/img/domain-new.png" in content["files"]
assert "data/img/domain.png" not in content["files"]
assert not site_temp.storage.isFile("data/img/domain-new.png")
assert site_temp.storage.isFile("data/img/domain.png")
settings_before = site_temp.settings
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
assert "data/img/domain-new.png" in content["files"]
assert "data/img/domain.png" not in content["files"]
assert site_temp.storage.isFile("data/img/domain-new.png")
assert not site_temp.storage.isFile("data/img/domain.png")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testRenameOptional(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.settings["optional_downloaded"] == 0
site_temp.needFile("data/optional.txt")
assert site_temp.settings["optional_downloaded"] > 0
settings_before = site_temp.settings
hashfield_before = site_temp.content_manager.hashfield.tobytes()
# Rename optional file
os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)
content = site.storage.loadJson("content.json")
assert "data/optional-new.txt" in content["files_optional"]
assert "data/optional.txt" not in content["files_optional"]
assert not site_temp.storage.isFile("data/optional-new.txt")
assert site_temp.storage.isFile("data/optional.txt")
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
assert "data/optional-new.txt" in content["files_optional"]
assert "data/optional.txt" not in content["files_optional"]
assert site_temp.storage.isFile("data/optional-new.txt")
assert not site_temp.storage.isFile("data/optional.txt")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
assert site_temp.content_manager.hashfield.tobytes() == hashfield_before
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Download normally
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from remote client
assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedBeforeDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Download normally
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from remote client
assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
    # Test when the connected peer has the optional file
def testOptionalDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
# Download site
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.storage.isFile("data/optional.txt")
assert site.storage.isFile("data/optional.txt")
site_temp.needFile("data/optional.txt")
assert site_temp.storage.isFile("data/optional.txt")
# Optional user file
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
optional_file_info = site_temp.content_manager.getFileInfo(
"data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
)
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
    # Test when the connected peer does not have the file, so ask it if it knows someone who has it
def testFindOptional(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init full source server (has optional files)
site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
file_server_full = FileServer(file_server.ip, 1546)
site_full.connection_server = file_server_full
def listen():
ConnectionServer.start(file_server_full)
ConnectionServer.listen(file_server_full)
gevent.spawn(listen)
time.sleep(0.001) # Port opening
file_server_full.sites[site_full.address] = site_full # Add site
site_full.storage.verifyFiles(quick_check=True) # Check optional files
site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server
hashfield = site_full_peer.updateHashfield() # Update hashfield
assert len(site_full.content_manager.hashfield) == 8
assert hashfield
assert site_full.storage.isFile("data/optional.txt")
assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert len(site_full_peer.hashfield) == 8
# Remove hashes from source server
for hash in list(site.content_manager.hashfield):
site.content_manager.hashfield.remove(hash)
# Init client server
site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
site_temp.addPeer(file_server.ip, 1544) # Add source server
# Download normal files
site_temp.log.info("Start Downloading site")
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site_temp.storage.isFile("data/optional.txt")
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server don't know he has the file
assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source server don't know he has the file
assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file
assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"]) # Source full peer on source server has the file
assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source full server he has the file
assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"]) # Source full server he has the file
site_temp.log.info("Request optional files")
with Spy.Spy(FileRequest, "route") as requests:
            # Request 2 files at the same time
threads = []
threads.append(site_temp.needFile("data/optional.txt", blocking=False))
threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
gevent.joinall(threads)
assert len([request for request in requests if request[1] == "findHashIds"]) == 1 # findHashids should call only once
assert site_temp.storage.isFile("data/optional.txt")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.deleteFiles()
file_server_full.stop()
[connection.close() for connection in file_server.connections]
site_full.content_manager.contents.db.close("FindOptional test end")
def testUpdate(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Don't try to find peers from the net
site.announce = mock.MagicMock(return_value=True)
site_temp.announce = mock.MagicMock(return_value=True)
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len(site_temp.bad_files) == 1
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json", "wb").write(data_new)
assert site.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("data/data.json").read() == data_original
site.log.info("Publish new data.json without patch")
# Publish without patch
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
site.publish()
time.sleep(0.1)
site.log.info("Downloading site")
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
# Close connection to avoid update spam limit
list(site.peers.values())[0].remove()
site.addPeer(file_server.ip, 1545)
list(site_temp.peers.values())[0].ping() # Connect back
time.sleep(0.1)
# Update with patch
data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
event_done = gevent.event.AsyncResult()
site.publish(diffs=diffs)
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
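    # Illustrative sketch (not part of the original suite) of the diff-publish
    # flow tested above: writing "<file>-new" next to a data file lets
    # getDiffs() compute line-level patches and move the -new file into place,
    # so peers receiving publish(diffs=...) can patch instead of re-download.
    # The private key is the test site's own signing key used throughout this file.
    def examplePatchedPublish(self, site):
        new_data = site.storage.open("data/data.json").read().replace(b'"ZeroBlog"', b'"SketchZeroBlog"')
        site.storage.open("data/data.json-new", "wb").write(new_data)
        diffs = site.content_manager.getDiffs("content.json")  # Consumes the -new file
        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
        site.publish(diffs=diffs)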
def testBigUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
content_json = site.storage.loadJson("content.json")
content_json["description"] = "BigZeroBlog" * 1024 * 10
site.storage.writeJson("content.json", content_json)
site.content_manager.loadContent("content.json", force=True)
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json
site.publish(diffs=diffs)
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
assert len(file_requests) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
    # Test what happens if the site's content.json is bigger than the site limit
def testHugeContentSiteUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site_temp.settings["size_limit"] = int(20 * 1024 *1024)
site_temp.saveSettings()
# Raise limit size to 20MB on site so it can be signed
site.settings["size_limit"] = int(20 * 1024 *1024)
site.saveSettings()
content_json = site.storage.loadJson("content.json")
content_json["description"] = "PartirUnJour" * 1024 * 1024
site.storage.writeJson("content.json", content_json)
changed, deleted = site.content_manager.loadContent("content.json", force=True)
        # Make sure we have two different content.json files
assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
# Publish with patch
site.log.info("Publish new content.json bigger than 10MB")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # verify it over 10MB
time.sleep(0.1)
site.publish(diffs=diffs)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
def testUnicodeFilename(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site.storage.write("data/img/árvíztűrő.png", b"test")
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
content = site.storage.loadJson("content.json")
assert "data/img/árvíztűrő.png" in content["files"]
assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
settings_before = site_temp.settings
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
content = site_temp.storage.loadJson("content.json")
assert "data/img/árvíztűrő.png" in content["files"]
assert site_temp.storage.isFile("data/img/árvíztűrő.png")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]

25
src/Test/TestSiteStorage.py Normal file

@ -0,0 +1,25 @@
import pytest
@pytest.mark.usefixtures("resetSettings")
class TestSiteStorage:
def testWalk(self, site):
# Rootdir
walk_root = list(site.storage.walk(""))
assert "content.json" in walk_root
assert "css/all.css" in walk_root
# Subdir
assert list(site.storage.walk("data-default")) == ["data.json", "users/content-default.json"]
def testList(self, site):
# Rootdir
list_root = list(site.storage.list(""))
assert "content.json" in list_root
assert "css/all.css" not in list_root
# Subdir
assert set(site.storage.list("data-default")) == set(["data.json", "users"])
def testDbRebuild(self, site):
assert site.storage.rebuildDb()
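# Illustrative sketch (not part of the original suite) of the difference the
# two tests above pin down: walk() yields file paths recursively and skips
# directories, while list() yields the direct children of one directory,
# files and directories alike.
def exampleInventory(site):
    all_files = set(site.storage.walk(""))   # Recursive, files only
    top_level = set(site.storage.list(""))   # One level, files and directories
    return all_files, top_level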

163
src/Test/TestThreadPool.py Normal file

@ -0,0 +1,163 @@
import time
import threading
import gevent
import pytest
from util import ThreadPool
class TestThreadPool:
def testExecutionOrder(self):
with ThreadPool.ThreadPool(4) as pool:
events = []
@pool.wrap
def blocker():
events.append("S")
out = 0
for i in range(10000000):
if i == 3000000:
events.append("M")
out += 1
events.append("D")
return out
threads = []
for i in range(3):
threads.append(gevent.spawn(blocker))
gevent.joinall(threads)
assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3
res = blocker()
assert res == 10000000
def testLockBlockingSameThread(self):
lock = ThreadPool.Lock()
s = time.time()
def unlocker():
time.sleep(1)
lock.release()
gevent.spawn(unlocker)
lock.acquire(True)
lock.acquire(True, timeout=2)
unlock_taken = time.time() - s
assert 1.0 < unlock_taken < 1.5
def testLockBlockingDifferentThread(self):
lock = ThreadPool.Lock()
def locker():
lock.acquire(True)
time.sleep(0.5)
lock.release()
with ThreadPool.ThreadPool(10) as pool:
threads = [
pool.spawn(locker),
pool.spawn(locker),
gevent.spawn(locker),
pool.spawn(locker)
]
time.sleep(0.1)
s = time.time()
lock.acquire(True, 5.0)
unlock_taken = time.time() - s
assert 1.8 < unlock_taken < 2.2
gevent.joinall(threads)
def testMainLoopCallerThreadId(self):
main_thread_id = threading.current_thread().ident
with ThreadPool.ThreadPool(5) as pool:
def getThreadId(*args, **kwargs):
return threading.current_thread().ident
t = pool.spawn(getThreadId)
assert t.get() != main_thread_id
t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId))
assert t.get() == main_thread_id
def testMainLoopCallerGeventSpawn(self):
main_thread_id = threading.current_thread().ident
with ThreadPool.ThreadPool(5) as pool:
def waiter():
time.sleep(1)
return threading.current_thread().ident
def geventSpawner():
event = ThreadPool.main_loop.call(gevent.spawn, waiter)
with pytest.raises(Exception) as greenlet_err:
event.get()
assert str(greenlet_err.value) == "cannot switch to a different thread"
waiter_thread_id = ThreadPool.main_loop.call(event.get)
return waiter_thread_id
s = time.time()
waiter_thread_id = pool.apply(geventSpawner)
assert main_thread_id == waiter_thread_id
time_taken = time.time() - s
assert 0.9 < time_taken < 1.2
def testEvent(self):
with ThreadPool.ThreadPool(5) as pool:
event = ThreadPool.Event()
def setter():
time.sleep(1)
event.set("done!")
def getter():
return event.get()
pool.spawn(setter)
t_gevent = gevent.spawn(getter)
t_pool = pool.spawn(getter)
s = time.time()
assert event.get() == "done!"
time_taken = time.time() - s
gevent.joinall([t_gevent, t_pool])
assert t_gevent.get() == "done!"
assert t_pool.get() == "done!"
assert 0.9 < time_taken < 1.2
with pytest.raises(Exception) as err:
event.set("another result")
assert "Event already has value" in str(err.value)
def testMemoryLeak(self):
import gc
thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))]
def worker():
time.sleep(0.1)
return "ok"
def poolTest():
with ThreadPool.ThreadPool(5) as pool:
for i in range(20):
pool.spawn(worker)
for i in range(5):
poolTest()
new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before]
#print("New objs:", new_thread_objs, "run:", num_run)
# Make sure no threadpool object left behind
assert not new_thread_objs
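
Aside: the ["S"] * 3 + ["M"] * 3 + ["D"] * 3 assertion above only holds because each blocker call runs on a real OS thread, so the three calls overlap and their start/middle/done events interleave instead of running back-to-back. A minimal sketch of the same pattern using gevent's built-in thread pool (not ZeroNet's util.ThreadPool wrapper):

from gevent.threadpool import ThreadPool

pool = ThreadPool(4)  # real OS threads behind a greenlet-friendly API

def blocker(n):
    total = 0
    for i in range(n):  # CPU-bound work runs off the gevent event loop
        total += 1
    return total

jobs = [pool.spawn(blocker, 1000000) for _ in range(3)]
print([job.get() for job in jobs])  # -> [1000000, 1000000, 1000000]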

153
src/Test/TestTor.py Normal file
View File

@@ -0,0 +1,153 @@
import time
import pytest
import mock
from File import FileServer
from Crypt import CryptRsa
from Config import config
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestTor:
def testDownload(self, tor_manager):
for retry in range(15):
time.sleep(1)
if tor_manager.enabled and tor_manager.conn:
break
assert tor_manager.enabled
def testManagerConnection(self, tor_manager):
assert "250-version" in tor_manager.request("GETINFO version")
def testAddOnion(self, tor_manager):
# Add
address = tor_manager.addOnion()
assert address
assert address in tor_manager.privatekeys
# Delete
assert tor_manager.delOnion(address)
assert address not in tor_manager.privatekeys
def testSignOnion(self, tor_manager):
address = tor_manager.addOnion()
# Sign
sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address))
assert len(sign) == 128
# Verify
publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address))
assert len(publickey) == 140
assert CryptRsa.verify(b"hello", publickey, sign)
assert not CryptRsa.verify(b"not hello", publickey, sign)
# Pub to address
assert CryptRsa.publickeyToOnion(publickey) == address
# Delete
tor_manager.delOnion(address)
@pytest.mark.slow
def testConnection(self, tor_manager, file_server, site, site_temp):
file_server.tor_manager.start_onions = True
address = file_server.tor_manager.getOnion(site.address)
assert address
print("Connecting to", address)
for retry in range(5): # Wait for hidden service creation
time.sleep(10)
try:
connection = file_server.getConnection(address + ".onion", 1544)
if connection:
break
except Exception as err:
continue
assert connection.handshake
assert not connection.handshake["peer_id"] # No peer_id for Tor connections
# Return the same connection without site specified
assert file_server.getConnection(address + ".onion", 1544) == connection
# No reuse for different site
assert file_server.getConnection(address + ".onion", 1544, site=site) != connection
assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site)
site_temp.address = "1OTHERSITE"
assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp)
# Only allow queries for the locked site
file_server.sites[site.address] = site
connection_locked = file_server.getConnection(address + ".onion", 1544, site=site)
assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site"
def testPex(self, file_server, site, site_temp):
# Register site to currently running fileserver
site.connection_server = file_server
file_server.sites[site.address] = site
# Create a new file server to emulate new peer connecting to our peer
file_server_temp = FileServer(file_server.ip, 1545)
site_temp.connection_server = file_server_temp
file_server_temp.sites[site_temp.address] = site_temp
# We will request peers from this
peer_source = site_temp.addPeer(file_server.ip, 1544)
# Get ip4 peers from source site
site.addPeer("1.2.3.4", 1555) # Add peer to source site
assert peer_source.pex(need_num=10) == 1
assert len(site_temp.peers) == 2
assert "1.2.3.4:1555" in site_temp.peers
# Get onion peers from source site
site.addPeer("bka4ht2bzxchy44r.onion", 1555)
assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers
# Don't add onion peers if not supported
assert "onion" not in file_server_temp.supported_ip_types
assert peer_source.pex(need_num=10) == 0
file_server_temp.supported_ip_types.append("onion")
assert peer_source.pex(need_num=10) == 1
assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers
def testFindHash(self, tor_manager, file_server, site, site_temp):
file_server.ip_incoming = {} # Reset flood protection
file_server.sites[site.address] = site
file_server.tor_manager = tor_manager
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Add file_server as peer to client
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
assert peer_file_server.findHashIds([1234]) == {}
# Add fake peer with required hash
fake_peer_1 = site.addPeer("bka4ht2bzxchy44r.onion", 1544)
fake_peer_1.hashfield.append(1234)
fake_peer_2 = site.addPeer("1.2.3.5", 1545)
fake_peer_2.hashfield.append(1234)
fake_peer_2.hashfield.append(1235)
fake_peer_3 = site.addPeer("1.2.3.6", 1546)
fake_peer_3.hashfield.append(1235)
fake_peer_3.hashfield.append(1236)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
# Test my address adding
site.content_manager.hashfield.append(1234)
res = peer_file_server.findHashIds([1234, 1235])
assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
def testSiteOnion(self, tor_manager):
with mock.patch.object(config, "tor", "always"):
assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2")
assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1")
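
Aside: the "Pub to address" assertion in testSignOnion rests on how v2 onion addresses are derived: the address is the base32 encoding of the first 10 bytes of the SHA-1 digest of the DER-encoded RSA public key. A short sketch of that derivation (helper name illustrative; CryptRsa's implementation may differ in detail):

import base64
import hashlib

def publickeyToOnion(der_publickey):
    # v2 onion address: base32(sha1(DER public key)[:10]), lower-cased
    digest = hashlib.sha1(der_publickey).digest()
    return base64.b32encode(digest[:10]).lower().decode("ascii")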

61
src/Test/TestTranslate.py Normal file
View File

@@ -0,0 +1,61 @@
from Translate import Translate
class TestTranslate:
def testTranslateStrict(self):
translate = Translate()
data = """
translated = _("original")
not_translated = "original"
"""
data_translated = translate.translateData(data, {"_(original)": "translated"})
assert 'translated = _("translated")' in data_translated
assert 'not_translated = "original"' in data_translated
def testTranslateStrictNamed(self):
translate = Translate()
data = """
translated = _("original", "original named")
translated_other = _("original", "original other named")
not_translated = "original"
"""
data_translated = translate.translateData(data, {"_(original, original named)": "translated"})
assert 'translated = _("translated")' in data_translated
assert 'not_translated = "original"' in data_translated
def testTranslateUtf8(self):
translate = Translate()
data = """
greeting = "Hi again árvztűrőtökörfúrógép!"
"""
data_translated = translate.translateData(data, {"Hi again árvztűrőtökörfúrógép!": "Üdv újra árvztűrőtökörfúrógép!"})
assert data_translated == """
greeting = "Üdv újra árvztűrőtökörfúrógép!"
"""
def testTranslateEscape(self):
_ = Translate()
_["Hello"] = "Szia"
# Simple escaping
data = "{_[Hello]} {username}!"
username = "Hacker<script>alert('boom')</script>"
data_translated = _(data)
assert 'Szia' in data_translated
assert '<' not in data_translated
assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
# Escaping dicts
user = {"username": "Hacker<script>alert('boom')</script>"}
data = "{_[Hello]} {user[username]}!"
data_translated = _(data)
assert 'Szia' in data_translated
assert '<' not in data_translated
assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
# Escaping lists
users = [{"username": "Hacker<script>alert('boom')</script>"}]
data = "{_[Hello]} {users[0][username]}!"
data_translated = _(data)
assert 'Szia' in data_translated
assert '<' not in data_translated
assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"

11
src/Test/TestUiWebsocket.py Normal file
View File

@@ -0,0 +1,11 @@
import sys
import pytest
@pytest.mark.usefixtures("resetSettings")
class TestUiWebsocket:
def testPermission(self, ui_websocket):
res = ui_websocket.testAction("ping")
assert res == "pong"
res = ui_websocket.testAction("certList")
assert "You don't have permission" in res["error"]

274
src/Test/TestUpnpPunch.py Normal file
View File

@@ -0,0 +1,274 @@
import socket
from urllib.parse import urlparse
import pytest
import mock
from util import UpnpPunch as upnp
@pytest.fixture
def mock_socket():
mock_socket = mock.MagicMock()
mock_socket.recv = mock.MagicMock(return_value=b'Hello')
mock_socket.bind = mock.MagicMock()
mock_socket.sendto = mock.MagicMock()
return mock_socket
@pytest.fixture
def url_obj():
return urlparse('http://192.168.1.1/ctrlPoint.xml')
@pytest.fixture(params=['WANPPPConnection', 'WANIPConnection'])
def igd_profile(request):
return """<root><serviceList><service>
<serviceType>urn:schemas-upnp-org:service:{}:1</serviceType>
<serviceId>urn:upnp-org:serviceId:wanpppc:pppoa</serviceId>
<controlURL>/upnp/control/wanpppcpppoa</controlURL>
<eventSubURL>/upnp/event/wanpppcpppoa</eventSubURL>
<SCPDURL>/WANPPPConnection.xml</SCPDURL>
</service></serviceList></root>""".format(request.param)
@pytest.fixture
def httplib_response():
class FakeResponse(object):
def __init__(self, status=200, body='OK'):
self.status = status
self.body = body
def read(self):
return self.body
return FakeResponse
class TestUpnpPunch(object):
def test_perform_m_search(self, mock_socket):
local_ip = '127.0.0.1'
with mock.patch('util.UpnpPunch.socket.socket',
return_value=mock_socket):
result = upnp.perform_m_search(local_ip)
assert result == 'Hello'
assert local_ip == mock_socket.bind.call_args_list[0][0][0][0]
assert ('239.255.255.250',
1900) == mock_socket.sendto.call_args_list[0][0][1]
def test_perform_m_search_socket_error(self, mock_socket):
mock_socket.recv.side_effect = socket.error('Timeout error')
with mock.patch('util.UpnpPunch.socket.socket',
return_value=mock_socket):
with pytest.raises(upnp.UpnpError):
upnp.perform_m_search('127.0.0.1')
def test_retrieve_location_from_ssdp(self, url_obj):
ctrl_location = url_obj.geturl()
parsed_location = urlparse(ctrl_location)
rsp = ('auth: gibberish\r\nlocation: {0}\r\n'
'Content-Type: text/html\r\n\r\n').format(ctrl_location)
result = upnp._retrieve_location_from_ssdp(rsp)
assert result == parsed_location
def test_retrieve_location_from_ssdp_no_header(self):
rsp = 'auth: gibberish\r\nContent-Type: application/json\r\n\r\n'
with pytest.raises(upnp.IGDError):
upnp._retrieve_location_from_ssdp(rsp)
def test_retrieve_igd_profile(self, url_obj):
with mock.patch('urllib.request.urlopen') as mock_urlopen:
upnp._retrieve_igd_profile(url_obj)
mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)
def test_retrieve_igd_profile_timeout(self, url_obj):
with mock.patch('urllib.request.urlopen') as mock_urlopen:
mock_urlopen.side_effect = socket.error('Timeout error')
with pytest.raises(upnp.IGDError):
upnp._retrieve_igd_profile(url_obj)
def test_parse_igd_profile_service_type(self, igd_profile):
control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
assert control_path == '/upnp/control/wanpppcpppoa'
assert upnp_schema in ('WANPPPConnection', 'WANIPConnection',)
def test_parse_igd_profile_no_ctrlurl(self, igd_profile):
igd_profile = igd_profile.replace('controlURL', 'nope')
with pytest.raises(upnp.IGDError):
control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
def test_parse_igd_profile_no_schema(self, igd_profile):
igd_profile = igd_profile.replace('Connection', 'nope')
with pytest.raises(upnp.IGDError):
control_path, upnp_schema = upnp._parse_igd_profile(igd_profile)
def test_create_open_message_parsable(self):
from xml.parsers.expat import ExpatError
msg, _ = upnp._create_open_message('127.0.0.1', 8888)
try:
upnp.parseString(msg)
except ExpatError as e:
pytest.fail('Incorrect XML message: {}'.format(e))
def test_create_open_message_contains_right_stuff(self):
settings = {'description': 'test desc',
'protocol': 'test proto',
'upnp_schema': 'test schema'}
msg, fn_name = upnp._create_open_message('127.0.0.1', 8888, **settings)
assert fn_name == 'AddPortMapping'
assert '127.0.0.1' in msg
assert '8888' in msg
assert settings['description'] in msg
assert settings['protocol'] in msg
assert settings['upnp_schema'] in msg
def test_parse_for_errors_bad_rsp(self, httplib_response):
rsp = httplib_response(status=500)
with pytest.raises(upnp.IGDError) as err:
upnp._parse_for_errors(rsp)
assert 'Unable to parse' in str(err.value)
def test_parse_for_errors_error(self, httplib_response):
soap_error = ('<document>'
'<errorCode>500</errorCode>'
'<errorDescription>Bad request</errorDescription>'
'</document>')
rsp = httplib_response(status=500, body=soap_error)
with pytest.raises(upnp.IGDError) as err:
upnp._parse_for_errors(rsp)
assert 'SOAP request error' in str(err.value)
def test_parse_for_errors_good_rsp(self, httplib_response):
rsp = httplib_response(status=200)
assert rsp == upnp._parse_for_errors(rsp)
def test_send_requests_success(self):
with mock.patch(
'util.UpnpPunch._send_soap_request') as mock_send_request:
mock_send_request.return_value = mock.MagicMock(status=200)
upnp._send_requests(['msg'], None, None, None)
assert mock_send_request.called
def test_send_requests_failed(self):
with mock.patch(
'util.UpnpPunch._send_soap_request') as mock_send_request:
mock_send_request.return_value = mock.MagicMock(status=500)
with pytest.raises(upnp.UpnpError):
upnp._send_requests(['msg'], None, None, None)
assert mock_send_request.called
def test_collect_idg_data(self):
pass
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_ask_to_open_port_success(self, mock_send_requests,
mock_collect_idg, mock_local_ips):
mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
mock_local_ips.return_value = ['192.168.0.12']
result = upnp.ask_to_open_port(retries=5)
soap_msg = mock_send_requests.call_args[0][0][0][0]
assert result is True
assert mock_collect_idg.called
assert '192.168.0.12' in soap_msg
assert '15441' in soap_msg
assert 'schema-yo' in soap_msg
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_ask_to_open_port_failure(self, mock_send_requests,
mock_collect_idg, mock_local_ips):
mock_local_ips.return_value = ['192.168.0.12']
mock_collect_idg.return_value = {'upnp_schema': 'schema-yo'}
mock_send_requests.side_effect = upnp.UpnpError()
with pytest.raises(upnp.UpnpError):
upnp.ask_to_open_port()
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_orchestrate_soap_request(self, mock_send_requests,
mock_collect_idg):
soap_mock = mock.MagicMock()
args = ['127.0.0.1', 31337, soap_mock, 'upnp-test', {'upnp_schema':
'schema-yo'}]
mock_collect_idg.return_value = args[-1]
upnp._orchestrate_soap_request(*args[:-1])
assert mock_collect_idg.called
soap_mock.assert_called_with(
*args[:2] + ['upnp-test', 'UDP', 'schema-yo'])
assert mock_send_requests.called
@mock.patch('util.UpnpPunch._collect_idg_data')
@mock.patch('util.UpnpPunch._send_requests')
def test_orchestrate_soap_request_without_desc(self, mock_send_requests,
mock_collect_idg):
soap_mock = mock.MagicMock()
args = ['127.0.0.1', 31337, soap_mock, {'upnp_schema': 'schema-yo'}]
mock_collect_idg.return_value = args[-1]
upnp._orchestrate_soap_request(*args[:-1])
assert mock_collect_idg.called
soap_mock.assert_called_with(*args[:2] + [None, 'UDP', 'schema-yo'])
assert mock_send_requests.called
def test_create_close_message_parsable(self):
from xml.parsers.expat import ExpatError
msg, _ = upnp._create_close_message('127.0.0.1', 8888)
try:
upnp.parseString(msg)
except ExpatError as e:
pytest.fail('Incorrect XML message: {}'.format(e))
def test_create_close_message_contains_right_stuff(self):
settings = {'protocol': 'test proto',
'upnp_schema': 'test schema'}
msg, fn_name = upnp._create_close_message('127.0.0.1', 8888, **
settings)
assert fn_name == 'DeletePortMapping'
assert '8888' in msg
assert settings['protocol'] in msg
assert settings['upnp_schema'] in msg
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._orchestrate_soap_request')
def test_communicate_with_igd_success(self, mock_orchestrate,
mock_get_local_ips):
mock_get_local_ips.return_value = ['192.168.0.12']
upnp._communicate_with_igd()
assert mock_get_local_ips.called
assert mock_orchestrate.called
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._orchestrate_soap_request')
def test_communicate_with_igd_succeed_despite_single_failure(
self, mock_orchestrate, mock_get_local_ips):
mock_get_local_ips.return_value = ['192.168.0.12']
mock_orchestrate.side_effect = [upnp.UpnpError, None]
upnp._communicate_with_igd(retries=2)
assert mock_get_local_ips.called
assert mock_orchestrate.called
@mock.patch('util.UpnpPunch._get_local_ips')
@mock.patch('util.UpnpPunch._orchestrate_soap_request')
def test_communicate_with_igd_total_failure(self, mock_orchestrate,
mock_get_local_ips):
mock_get_local_ips.return_value = ['192.168.0.12']
mock_orchestrate.side_effect = [upnp.UpnpError, upnp.IGDError]
with pytest.raises(upnp.UpnpError):
upnp._communicate_with_igd(retries=2)
assert mock_get_local_ips.called
assert mock_orchestrate.called
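
Aside: test_create_open_message_contains_right_stuff only asserts that the generated SOAP body names the AddPortMapping action and embeds the ip, port, description, protocol and schema. For orientation, this is the general shape of an IGD AddPortMapping request body (element names from the UPnP IGD spec; the exact markup UpnpPunch emits may differ):

SOAP_OPEN = """<?xml version="1.0"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"
            s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
  <s:Body>
    <u:AddPortMapping xmlns:u="urn:schemas-upnp-org:service:{schema}:1">
      <NewExternalPort>{port}</NewExternalPort>
      <NewProtocol>{protocol}</NewProtocol>
      <NewInternalPort>{port}</NewInternalPort>
      <NewInternalClient>{ip}</NewInternalClient>
      <NewEnabled>1</NewEnabled>
      <NewPortMappingDescription>{description}</NewPortMappingDescription>
      <NewLeaseDuration>0</NewLeaseDuration>
    </u:AddPortMapping>
  </s:Body>
</s:Envelope>"""

print(SOAP_OPEN.format(schema="WANIPConnection", port=15441, protocol="UDP",
                       ip="192.168.0.12", description="ZeroNet"))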

50
src/Test/TestUser.py Normal file
View File

@@ -0,0 +1,50 @@
import pytest
from Crypt import CryptBitcoin
@pytest.mark.usefixtures("resetSettings")
class TestUser:
def testAddress(self, user):
assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index
# Re-generate privatekey based on address_index
def testNewSite(self, user):
address, address_index, site_data = user.getNewSiteData() # Create a new random site
assert CryptBitcoin.hdPrivatekey(user.master_seed, address_index) == site_data["privatekey"]
user.sites = {} # Reset user data
# Site address and auth address are different
assert user.getSiteData(address)["auth_address"] != address
# Re-generate auth_privatekey for site
assert user.getSiteData(address)["auth_privatekey"] == site_data["auth_privatekey"]
def testAuthAddress(self, user):
# Auth address without Cert
auth_address = user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert auth_address == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == auth_address
def testCert(self, user):
cert_auth_address = user.getAuthAddress("1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz") # Add site to user's registry
# Add cert
user.addCert(cert_auth_address, "zeroid.bit", "faketype", "fakeuser", "fakesign")
user.setCert("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr", "zeroid.bit")
# When using a certificate, the auth address should be the same as the certificate provider's
assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
auth_privatekey = user.getAuthPrivatekey("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert CryptBitcoin.privatekeyToAddress(auth_privatekey) == cert_auth_address
# Test delete site data
assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" in user.sites
user.deleteSiteData("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr")
assert "1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" not in user.sites
# Re-adding the site should generate a normal, unique auth_address
assert not user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == cert_auth_address
assert user.getAuthAddress("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr") == "1MyJgYQjeEkR9QD66nkfJc9zqi9uUy5Lr2"
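
Aside: the large address_index constant in testAddress is not arbitrary; it matches the site address's ASCII bytes read as one big-endian integer, which gives a deterministic derivation index per address. A one-line sketch of that relationship (hedged: getAddressAuthIndex's exact implementation may differ):

address = "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
address_index = int.from_bytes(address.encode("ascii"), "big")
assert address_index == 1458664252141532163166741013621928587528255888800826689784628722366466547364755811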

105
src/Test/TestWeb.py Normal file
View File

@@ -0,0 +1,105 @@
import urllib.request
import pytest
try:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.expected_conditions import staleness_of, title_is
from selenium.common.exceptions import NoSuchElementException
except ImportError:
pass
class WaitForPageLoad(object):
def __init__(self, browser):
self.browser = browser
def __enter__(self):
self.old_page = self.browser.find_element_by_tag_name('html')
def __exit__(self, *args):
WebDriverWait(self.browser, 10).until(staleness_of(self.old_page))
def getContextUrl(browser):
return browser.execute_script("return window.location.toString()")
def getUrl(url):
content = urllib.request.urlopen(url).read().decode("utf8")
assert "server error" not in content.lower(), "Got a server error! " + repr(url)
return content
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.webtest
class TestWeb:
def testFileSecurity(self, site_url):
assert "Not Found" in getUrl("%s/media/sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/../config.py" % site_url)
assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
assert "Not Found" in getUrl("%s/raw/sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url)
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
assert "Forbidden" in getUrl("%s/content.db" % site_url)
assert "Forbidden" in getUrl("%s/./users.json" % site_url)
assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url)
assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url)
def testLinkSecurity(self, browser, site_url):
browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url)
WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet"))
assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url
# Switch to inner frame
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
assert "wrapper_nonce" in getContextUrl(browser)
assert browser.find_element_by_id("script_output").text == "Result: Works"
browser.switch_to.default_content()
# Clicking on links without target
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
with WaitForPageLoad(browser):
browser.find_element_by_id("link_to_current").click()
assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content
assert "Forbidden" not in browser.page_source
# Check if we have frame inside frame
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
with pytest.raises(NoSuchElementException):
assert not browser.find_element_by_id("inner-iframe")
browser.switch_to.default_content()
# Clicking on link with target=_top
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
with WaitForPageLoad(browser):
browser.find_element_by_id("link_to_top").click()
assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content
assert "Forbidden" not in browser.page_source
browser.switch_to.default_content()
# Try to escape from inner_frame
browser.switch_to.frame(browser.find_element_by_id("inner-iframe"))
assert "wrapper_nonce" in getContextUrl(browser) # Make sure we are inside of the inner-iframe
with WaitForPageLoad(browser):
browser.execute_script("window.top.location = window.location")
assert "wrapper_nonce" in getContextUrl(browser) # We try to use nonce-ed html without iframe
assert "<iframe" in browser.page_source # Only allow to use nonce once-time
browser.switch_to.default_content()
def testRaw(self, browser, site_url):
browser.get("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url)
WebDriverWait(browser, 10).until(title_is("Security tests"))
assert getContextUrl(browser) == "%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url
assert browser.find_element_by_id("script_output").text == "Result: Fail"
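
Aside: every "Forbidden" assertion in testFileSecurity is a path-traversal probe: ./, ../ and ..// variants must never resolve outside the site directory. A minimal sketch of the normalization check such a server needs (illustrative helper, not UiServer's actual code):

import posixpath

def isSafePath(inner_path):
    # Reject absolute paths, anything that changes under normalization
    # (catches ./ and // tricks), and anything starting with ..
    if inner_path.startswith("/"):
        return False
    if inner_path != posixpath.normpath(inner_path):
        return False
    return not inner_path.startswith("..")

assert isSafePath("css/all.css")
assert not isSafePath("../config.py")
assert not isSafePath("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json")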

128
src/Test/TestWorkerTaskManager.py Normal file
View File

@@ -0,0 +1,128 @@
import pytest
from Worker import WorkerTaskManager
from . import Spy
class TestWorkerTaskManager:
def checkSort(self, tasks): # Check if it has the same order as a list sorted separately
tasks_list = list(tasks)
tasks_list.sort(key=lambda task: task["id"])
assert tasks_list != list(tasks)
tasks_list.sort(key=lambda task: (0 - (task["priority"] - task["workers_num"] * 10), task["id"]))
assert tasks_list == list(tasks)
def testAppendSimple(self):
tasks = WorkerTaskManager.WorkerTaskManager()
tasks.append({"id": 1, "priority": 15, "workers_num": 1, "inner_path": "file1.json"})
tasks.append({"id": 2, "priority": 1, "workers_num": 0, "inner_path": "file2.json"})
tasks.append({"id": 3, "priority": 8, "workers_num": 0, "inner_path": "file3.json"})
assert [task["inner_path"] for task in tasks] == ["file3.json", "file1.json", "file2.json"]
self.checkSort(tasks)
def testAppendMany(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
assert tasks[0]["inner_path"] == "file39.json"
assert tasks[-1]["inner_path"] == "file980.json"
self.checkSort(tasks)
def testRemove(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
i = 333
task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}
assert task in tasks
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.remove(task)
assert len(calls) == 0
assert task not in tasks
# Remove non existent item
with Spy.Spy(tasks, "indexSlow") as calls:
with pytest.raises(ValueError):
tasks.remove(task)
assert len(calls) == 0
self.checkSort(tasks)
def testRemoveAll(self):
tasks = WorkerTaskManager.WorkerTaskManager()
tasks_list = []
for i in range(1000):
task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}
tasks.append(task)
tasks_list.append(task)
for task in tasks_list:
tasks.remove(task)
assert len(tasks.inner_paths) == 0
assert len(tasks) == 0
def testModify(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
task = tasks[333]
task["priority"] += 10
with pytest.raises(AssertionError):
self.checkSort(tasks)
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task)
assert len(calls) == 1
assert task in tasks
self.checkSort(tasks)
# Check reorder optimization
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task, "priority", task["priority"] + 10)
assert len(calls) == 0
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task, "priority", task["workers_num"] - 1)
assert len(calls) == 0
self.checkSort(tasks)
def testModifySamePriority(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": 10, "workers_num": 5, "inner_path": "file%s.json" % i})
task = tasks[333]
# Check reorder optimization
with Spy.Spy(tasks, "indexSlow") as calls:
tasks.updateItem(task, "priority", task["workers_num"] - 1)
assert len(calls) == 0
def testIn(self):
tasks = WorkerTaskManager.WorkerTaskManager()
i = 1
task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}
assert task not in tasks
def testFindTask(self):
tasks = WorkerTaskManager.WorkerTaskManager()
for i in range(1000):
tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i})
assert tasks.findTask("file999.json")
assert not tasks.findTask("file-unknown.json")
tasks.remove(tasks.findTask("file999.json"))
assert not tasks.findTask("file999.json")
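
Aside: checkSort above encodes the queue's entire ordering rule: tasks sort by descending effective priority, priority - 10 * workers_num, with the insertion id as tie-breaker, so tasks that already have workers sink. The expected order in testAppendSimple follows directly:

tasks = [
    {"id": 1, "priority": 15, "workers_num": 1},  # effective priority 5
    {"id": 2, "priority": 1, "workers_num": 0},   # effective priority 1
    {"id": 3, "priority": 8, "workers_num": 0},   # effective priority 8
]
tasks.sort(key=lambda task: (0 - (task["priority"] - task["workers_num"] * 10), task["id"]))
print([task["id"] for task in tasks])  # -> [3, 1, 2]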

0
src/Test/__init__.py Normal file
View File

497
src/Test/conftest.py Normal file
View File

@@ -0,0 +1,497 @@
import os
import sys
import urllib.request
import time
import logging
import json
import shutil
import gc
import datetime
import atexit
import threading
import socket
import pytest
import mock
import gevent
if "libev" not in str(gevent.config.loop):
# Workaround for random crash when libuv used with threads
gevent.config.loop = "libev-cext"
import gevent.event
from gevent import monkey
monkey.patch_all(thread=False, subprocess=False)
atexit_register = atexit.register
atexit.register = lambda func: "" # Don't register shutdown functions to avoid IO error on exit
def pytest_addoption(parser):
parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests")
def pytest_collection_modifyitems(config, items):
if config.getoption("--slow"):
# --runslow given in cli: do not skip slow tests
return
skip_slow = pytest.mark.skip(reason="need --slow option to run")
for item in items:
if "slow" in item.keywords:
item.add_marker(skip_slow)
# Config
if sys.platform == "win32":
CHROMEDRIVER_PATH = "tools/chrome/chromedriver.exe"
else:
CHROMEDRIVER_PATH = "chromedriver"
SITE_URL = "http://127.0.0.1:43110"
TEST_DATA_PATH = 'src/Test/testdata'
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../lib")) # External modules directory
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/..")) # Imports relative to src dir
from Config import config
config.argv = ["none"] # Don't pass any argv to config parser
config.parse(silent=True, parse_config=False) # Plugins need to access the configuration
config.action = "test"
# Load plugins
from Plugin import PluginManager
config.data_dir = TEST_DATA_PATH # Use test data for unittests
config.debug = True
os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../..")) # Set working dir
all_loaded = PluginManager.plugin_manager.loadPlugins()
assert all_loaded, "Not all plugins loaded successfully"
config.loadPlugins()
config.parse(parse_config=False) # Parse again to add plugin configuration options
config.action = "test"
config.debug = True
config.debug_socket = True # Print socket debug messages
config.verbose = True # Verbose logging
config.tor = "disable" # Don't start Tor client
config.trackers = []
config.data_dir = TEST_DATA_PATH # Use test data for unittests
if "ZERONET_LOG_DIR" in os.environ:
config.log_dir = os.environ["ZERONET_LOG_DIR"]
config.initLogging(console_logging=False)
# Set custom formatter with relative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
time_start = time.time()
class TimeFilter(logging.Filter):
def __init__(self, *args, **kwargs):
self.time_last = time.time()
self.main_thread_id = threading.current_thread().ident
super().__init__(*args, **kwargs)
def filter(self, record):
if threading.current_thread().ident != self.main_thread_id:
record.thread_marker = "T"
record.thread_title = "(Thread#%s)" % threading.current_thread().ident
else:
record.thread_marker = " "
record.thread_title = ""
since_last = time.time() - self.time_last
if since_last > 0.1:
line_marker = "!"
elif since_last > 0.02:
line_marker = "*"
elif since_last > 0.01:
line_marker = "-"
else:
line_marker = " "
since_start = time.time() - time_start
record.since_start = "%s%.3fs" % (line_marker, since_start)
self.time_last = time.time()
return True
log = logging.getLogger()
fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s')
[hndl.addFilter(TimeFilter()) for hndl in log.handlers]
[hndl.setFormatter(fmt) for hndl in log.handlers]
from Site.Site import Site
from Site import SiteManager
from User import UserManager
from File import FileServer
from Connection import ConnectionServer
from Crypt import CryptConnection
from Crypt import CryptBitcoin
from Ui import UiWebsocket
from Tor import TorManager
from Content import ContentDb
from util import RateLimit
from Db import Db
from Debug import Debug
gevent.get_hub().NOT_ERROR += (Debug.Notify,)
def cleanup():
Db.dbCloseAll()
for dir_path in [config.data_dir, config.data_dir + "-temp"]:
if os.path.isdir(dir_path):
for file_name in os.listdir(dir_path):
ext = file_name.rsplit(".", 1)[-1]
if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]:
continue
file_path = dir_path + "/" + file_name
if os.path.isfile(file_path):
os.unlink(file_path)
atexit_register(cleanup)
@pytest.fixture(scope="session")
def resetSettings(request):
open("%s/sites.json" % config.data_dir, "w").write("{}")
open("%s/filters.json" % config.data_dir, "w").write("{}")
open("%s/users.json" % config.data_dir, "w").write("""
{
"15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
"certs": {},
"master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
"sites": {}
}
}
""")
@pytest.fixture(scope="session")
def resetTempSettings(request):
data_dir_temp = config.data_dir + "-temp"
if not os.path.isdir(data_dir_temp):
os.mkdir(data_dir_temp)
open("%s/sites.json" % data_dir_temp, "w").write("{}")
open("%s/filters.json" % data_dir_temp, "w").write("{}")
open("%s/users.json" % data_dir_temp, "w").write("""
{
"15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
"certs": {},
"master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
"sites": {}
}
}
""")
def cleanup():
os.unlink("%s/sites.json" % data_dir_temp)
os.unlink("%s/users.json" % data_dir_temp)
os.unlink("%s/filters.json" % data_dir_temp)
request.addfinalizer(cleanup)
@pytest.fixture()
def site(request):
threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
# Reset ratelimit
RateLimit.queue_db = {}
RateLimit.called_db = {}
site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
# Always use original data
assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("") # Make sure we dont delete everything
shutil.rmtree(site.storage.getPath(""), True)
shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath(""))
# Add to site manager
SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
def cleanup():
site.delete()
site.content_manager.contents.db.close("Test cleanup")
site.content_manager.contents.db.timer_check_optional.kill()
SiteManager.site_manager.sites.clear()
db_path = "%s/content.db" % config.data_dir
os.unlink(db_path)
del ContentDb.content_dbs[db_path]
gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
request.addfinalizer(cleanup)
site.greenlet_manager.stopGreenlets()
site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files
if not SiteManager.site_manager.sites:
SiteManager.site_manager.sites = {}
SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
site.settings["serving"] = True
return site
@pytest.fixture()
def site_temp(request):
threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
with mock.patch("Config.config.data_dir", config.data_dir + "-temp"):
site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
site_temp.settings["serving"] = True
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
def cleanup():
site_temp.delete()
site_temp.content_manager.contents.db.close("Test cleanup")
site_temp.content_manager.contents.db.timer_check_optional.kill()
db_path = "%s-temp/content.db" % config.data_dir
os.unlink(db_path)
del ContentDb.content_dbs[db_path]
gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
request.addfinalizer(cleanup)
site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short)
return site_temp
@pytest.fixture(scope="session")
def user():
user = UserManager.user_manager.get()
if not user:
user = UserManager.user_manager.create()
user.sites = {} # Reset user data
return user
@pytest.fixture(scope="session")
def browser(request):
try:
from selenium import webdriver
print("Starting chromedriver...")
options = webdriver.chrome.options.Options()
options.add_argument("--headless")
options.add_argument("--window-size=1920x1080")
options.add_argument("--log-level=1")
browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, service_log_path=os.path.devnull, options=options)
def quit():
browser.quit()
request.addfinalizer(quit)
except Exception as err:
raise pytest.skip("Test requires selenium + chromedriver: %s" % err)
return browser
@pytest.fixture(scope="session")
def site_url():
try:
urllib.request.urlopen(SITE_URL).read()
except Exception as err:
raise pytest.skip("Test requires zeronet client running: %s" % err)
return SITE_URL
@pytest.fixture(params=['ipv4', 'ipv6'])
def file_server(request):
if request.param == "ipv4":
return request.getfixturevalue("file_server4")
else:
return request.getfixturevalue("file_server6")
@pytest.fixture
def file_server4(request):
time.sleep(0.1)
file_server = FileServer("127.0.0.1", 1544)
file_server.ip_external = "1.2.3.4" # Fake external ip
def listen():
ConnectionServer.start(file_server)
ConnectionServer.listen(file_server)
gevent.spawn(listen)
# Wait for port opening
for retry in range(10):
time.sleep(0.1) # Port opening
try:
conn = file_server.getConnection("127.0.0.1", 1544)
conn.close()
break
except Exception as err:
print("FileServer6 startup error", Debug.formatException(err))
assert file_server.running
file_server.ip_incoming = {} # Reset flood protection
def stop():
file_server.stop()
request.addfinalizer(stop)
return file_server
@pytest.fixture
def file_server6(request):
try:
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock.connect(("::1", 80, 1, 1))
has_ipv6 = True
except OSError:
has_ipv6 = False
if not has_ipv6:
pytest.skip("Ipv6 not supported")
time.sleep(0.1)
file_server6 = FileServer("::1", 1544)
file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c' # Fake external ip
def listen():
ConnectionServer.start(file_server6)
ConnectionServer.listen(file_server6)
gevent.spawn(listen)
# Wait for port opening
for retry in range(10):
time.sleep(0.1) # Port opening
try:
conn = file_server6.getConnection("::1", 1544)
conn.close()
break
except Exception as err:
print("FileServer6 startup error", Debug.formatException(err))
assert file_server6.running
file_server6.ip_incoming = {} # Reset flood protection
def stop():
file_server6.stop()
request.addfinalizer(stop)
return file_server6
@pytest.fixture()
def ui_websocket(site, user):
class WsMock:
def __init__(self):
self.result = gevent.event.AsyncResult()
def send(self, data):
logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack()))
self.result.set(json.loads(data)["result"])
def getResult(self):
logging.debug("WsMock: Get result")
back = self.result.get()
logging.debug("WsMock: Got result (data: %s)" % back)
self.result = gevent.event.AsyncResult()
return back
ws_mock = WsMock()
ui_websocket = UiWebsocket(ws_mock, site, None, user, None)
def testAction(action, *args, **kwargs):
ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs})
return ui_websocket.ws.getResult()
ui_websocket.testAction = testAction
return ui_websocket
@pytest.fixture(scope="session")
def tor_manager():
try:
tor_manager = TorManager(fileserver_port=1544)
tor_manager.start()
assert tor_manager.conn is not None
tor_manager.startOnions()
except Exception as err:
raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err))
return tor_manager
@pytest.fixture()
def db(request):
db_path = "%s/zeronet.db" % config.data_dir
schema = {
"db_name": "TestDb",
"db_file": "%s/zeronet.db" % config.data_dir,
"maps": {
"data.json": {
"to_table": [
"test",
{"node": "test", "table": "test_importfilter", "import_cols": ["test_id", "title"]}
]
}
},
"tables": {
"test": {
"cols": [
["test_id", "INTEGER"],
["title", "TEXT"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
"schema_changed": 1426195822
},
"test_importfilter": {
"cols": [
["test_id", "INTEGER"],
["title", "TEXT"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX test_importfilter_id ON test_importfilter(test_id)"],
"schema_changed": 1426195822
}
}
}
if os.path.isfile(db_path):
os.unlink(db_path)
db = Db.Db(schema, db_path)
db.checkTables()
def stop():
db.close("Test db cleanup")
os.unlink(db_path)
request.addfinalizer(stop)
return db
@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"])
def crypt_bitcoin_lib(request, monkeypatch):
monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param)
CryptBitcoin.loadLib(request.param)
return CryptBitcoin
@pytest.fixture(scope='function', autouse=True)
def logCaseStart(request):
global time_start
time_start = time.time()
logging.debug("---- Start test case: %s ----" % request._pyfuncitem)
yield None # Wait until all tests are done
# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file)
def workaroundPytestLogError():
import _pytest.capture
write_original = _pytest.capture.EncodedFile.write
def write_patched(obj, *args, **kwargs):
try:
write_original(obj, *args, **kwargs)
except ValueError as err:
if str(err) == "I/O operation on closed file":
pass
else:
raise err
def flush_patched(obj, *args, **kwargs):
try:
obj.buffer.flush(*args, **kwargs)
except ValueError as err:
if str(err).startswith("I/O operation on closed file"):
pass
else:
raise err
_pytest.capture.EncodedFile.write = write_patched
_pytest.capture.EncodedFile.flush = flush_patched
workaroundPytestLogError()
@pytest.fixture(scope='session', autouse=True)
def disableLog():
yield None # Wait until all tests are done
logging.getLogger('').setLevel(logging.getLevelName(logging.CRITICAL))
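
Aside: the pytest_addoption / pytest_collection_modifyitems pair at the top of conftest.py is the standard opt-in pattern for expensive tests: anything marked slow is still collected but skipped unless --slow is passed. Usage looks like this (hypothetical test name for illustration):

import time
import pytest

@pytest.mark.slow
def testLongDownload():
    time.sleep(10)  # skipped by default; runs with: pytest --slow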

15
src/Test/coverage.ini Normal file
View File

@@ -0,0 +1,15 @@
[run]
branch = True
concurrency = gevent
omit =
src/lib/*
src/Test/*
[report]
exclude_lines =
pragma: no cover
if __name__ == .__main__.:
if config.debug:
if config.debug_socket:
if self.logging:
def __repr__

6
src/Test/pytest.ini Normal file
View File

@@ -0,0 +1,6 @@
[pytest]
python_files = Test*.py
addopts = -rsxX -v --durations=6 --no-print-logs --capture=fd
markers =
slow: mark a tests as slow.
webtest: mark a test as a webtest.

View File

@@ -0,0 +1,133 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"background-color": "white",
"description": "Blogging platform Demo",
"domain": "Blog.ZeroNetwork.bit",
"files": {
"css/all.css": {
"sha512": "65ddd3a2071a0f48c34783aa3b1bde4424bdea344630af05a237557a62bd55dc",
"size": 112710
},
"data-default/data.json": {
"sha512": "3f5c5a220bde41b464ab116cce0bd670dd0b4ff5fe4a73d1dffc4719140038f2",
"size": 196
},
"data-default/users/content-default.json": {
"sha512": "0603ce08f7abb92b3840ad0cf40e95ea0b3ed3511b31524d4d70e88adba83daa",
"size": 679
},
"data/data.json": {
"sha512": "0f2321c905b761a05c360a389e1de149d952b16097c4ccf8310158356e85fb52",
"size": 31126
},
"data/img/autoupdate.png": {
"sha512": "d2b4dc8e0da2861ea051c0c13490a4eccf8933d77383a5b43de447c49d816e71",
"size": 24460
},
"data/img/direct_domains.png": {
"sha512": "5f14b30c1852735ab329b22496b1e2ea751cb04704789443ad73a70587c59719",
"size": 16185
},
"data/img/domain.png": {
"sha512": "ce87e0831f4d1e95a95d7120ca4d33f8273c6fce9f5bbedf7209396ea0b57b6a",
"size": 11881
},
"data/img/memory.png": {
"sha512": "dd56515085b4a79b5809716f76f267ec3a204be3ee0d215591a77bf0f390fa4e",
"size": 12775
},
"data/img/multiuser.png": {
"sha512": "88e3f795f9b86583640867897de6efc14e1aa42f93e848ed1645213e6cc210c6",
"size": 29480
},
"data/img/progressbar.png": {
"sha512": "23d592ae386ce14158cec34d32a3556771725e331c14d5a4905c59e0fe980ebf",
"size": 13294
},
"data/img/slides.png": {
"sha512": "1933db3b90ab93465befa1bd0843babe38173975e306286e08151be9992f767e",
"size": 14439
},
"data/img/slots_memory.png": {
"sha512": "82a250e6da909d7f66341e5b5c443353958f86728cd3f06e988b6441e6847c29",
"size": 9488
},
"data/img/trayicon.png": {
"sha512": "e7ae65bf280f13fb7175c1293dad7d18f1fcb186ebc9e1e33850cdaccb897b8f",
"size": 19040
},
"dbschema.json": {
"sha512": "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d",
"size": 1527
},
"img/loading.gif": {
"sha512": "8a42b98962faea74618113166886be488c09dad10ca47fe97005edc5fb40cc00",
"size": 723
},
"index.html": {
"sha512": "c4039ebfc4cb6f116cac05e803a18644ed70404474a572f0d8473f4572f05df3",
"size": 4667
},
"js/all.js": {
"sha512": "034c97535f3c9b3fbebf2dcf61a38711dae762acf1a99168ae7ddc7e265f582c",
"size": 201178
}
},
"files_optional": {
"data/img/zeroblog-comments.png": {
"sha512": "efe4e815a260e555303e5c49e550a689d27a8361f64667bd4a91dbcccb83d2b4",
"size": 24001
},
"data/img/zeroid.png": {
"sha512": "b46d541a9e51ba2ddc8a49955b7debbc3b45fd13467d3c20ef104e9d938d052b",
"size": 18875
},
"data/img/zeroname.png": {
"sha512": "bab45a1bb2087b64e4f69f756b2ffa5ad39b7fdc48c83609cdde44028a7a155d",
"size": 36031
},
"data/img/zerotalk-mark.png": {
"sha512": "a335b2fedeb8d291ca68d3091f567c180628e80f41de4331a5feb19601d078af",
"size": 44862
},
"data/img/zerotalk-upvote.png": {
"sha512": "b1ffd7f948b4f99248dde7efe256c2efdfd997f7e876fb9734f986ef2b561732",
"size": 41092
},
"data/img/zerotalk.png": {
"sha512": "54d10497a1ffca9a4780092fd1bd158c15f639856d654d2eb33a42f9d8e33cd8",
"size": 26606
},
"data/optional.txt": {
"sha512": "c6f81db0e9f8206c971c9e5826e3ba823ffbb1a3a900f8047652a8bf78ea98fd",
"size": 6
}
},
"ignore": "((js|css)/(?!all.(js|css))|data/.*db|data/users/.*/.*|data/test_include/.*)",
"includes": {
"data/test_include/content.json": {
"added": 1424976057,
"files_allowed": "data.json",
"includes_allowed": false,
"max_size": 20000,
"signers": ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"],
"signers_required": 1,
"user_id": 47,
"user_name": "test"
},
"data/users/content.json": {
"signers": ["1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f"],
"signers_required": 1
}
},
"inner_path": "content.json",
"modified": 1503257990,
"optional": "(data/img/zero.*|data/optional.*)",
"signers_sign": "HDNmWJHM2diYln4pkdL+qYOvgE7MdwayzeG+xEUZBgp1HtOjBJS+knDEVQsBkjcOPicDG2it1r6R1eQrmogqSP0=",
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G4Uq365UBliQG66ygip1jNGYqW6Eh9Mm7nLguDFqAgk/Hksq/ruqMf9rXv78mgUfPBvL2+XgDKYvFDtlykPFZxk="
},
"signs_required": 1,
"title": "ZeroBlog",
"zeronet_version": "0.5.7"
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
{
"title": "MyZeroBlog",
"description": "My ZeroBlog.",
"links": "- [Source code](https://github.com/HelloZeroNet)",
"next_post_id": 1,
"demo": false,
"modified": 1432515193,
"post": [
]
}

View File

@@ -0,0 +1,25 @@
{
"files": {},
"ignore": ".*",
"modified": 1432466966.003,
"signs": {
"1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8": "HChU28lG4MCnAiui6wDAaVCD4QUrgSy4zZ67+MMHidcUJRkLGnO3j4Eb1N0AWQ86nhSBwoOQf08Rha7gRyTDlAk="
},
"user_contents": {
"cert_signers": {
"zeroid.bit": [ "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz" ]
},
"permission_rules": {
".*": {
"files_allowed": "data.json",
"max_size": 10000
},
"bitid/.*@zeroid.bit": { "max_size": 40000 },
"bitmsg/.*@zeroid.bit": { "max_size": 15000 }
},
"permissions": {
"banexample@zeroid.bit": false,
"nofish@zeroid.bit": { "max_size": 20000 }
}
}
}
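
Aside: the user_contents block above is what drives the multi-user tests: cert_signers pins which authority may issue zeroid.bit certificates, and permission_rules applies regex-matched limits per user. A hedged sketch of how such rules could be merged for one user (ZeroNet's real merging logic also honors the per-user permissions overrides above):

import re

permission_rules = {
    ".*": {"files_allowed": "data.json", "max_size": 10000},
    "bitid/.*@zeroid.bit": {"max_size": 40000},
    "bitmsg/.*@zeroid.bit": {"max_size": 15000},
}

def rulesFor(auth_user):
    merged = {}
    for pattern, rule in permission_rules.items():
        if re.match(pattern, auth_user):
            merged.update(rule)  # later, more specific matches override
    return merged

print(rulesFor("bitid/nofish@zeroid.bit"))
# -> {'files_allowed': 'data.json', 'max_size': 40000}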

View File

@@ -0,0 +1,244 @@
{
"title": "ZeroBlog",
"description": "Demo for decentralized, self publishing blogging platform.",
"links": "- [Source code](https://github.com/HelloZeroNet)\n- [Create new blog](?Post:3:How+to+have+a+blog+like+this)",
"next_post_id": 42,
"demo": false,
"modified": 1433033806,
"post": [
{
"post_id": 41,
"title": "Changelog: May 31, 2015",
"date_published": 1433033779.604,
"body": " - rev194\n - Ugly OpenSSL memory leak fix\n - Added Docker and Vargant files (thanks to n3r0-ch)\n\nZeroBlog\n - Comment editing, Deleting, Replying added\n\nNew official site: http://zeronet.io/"
},
{
"post_id": 40,
"title": "Trusted authorization providers",
"date_published": 1432549828.319,
"body": "What is it good for?\n\n - It allows you to have multi-user sites without need of a bot that listen to new user registration requests.\n - You can use the same username across sites\n - The site owner can give you (or revoke) permissions based on your ZeroID username\n\nHow does it works?\n\n - You visit an authorization provider site (eg zeroid.bit)\n - You enter the username you want to register and sent the request to the authorization provider site owner (zeroid supports bitmessage and simple http request).\n - The authorization provider process your request and it he finds everything all right (unique username, other anti-spam methods) he sends you a certificate for the username registration.\n - If a site trust your authorization provider you can post your own content (comments, topics, upvotes, etc.) using this certificate without ever contacting the site owner.\n\nWhat sites currently supports ZeroID?\n\n - You can post comments to ZeroBlog using your ZeroID\n - Later, if everyone is updated to 0.3.0 a new ZeroTalk is also planned that supports ZeroID certificates\n\nWhy is it necessary?\n\n - To have some kind of control over the users of your site. (eg. remove misbehaving users)\n\nOther info\n\n - ZeroID is a standard site, anyone can clone it and have his/her own one\n - You can stop seeding ZeroID site after you got your cert"
},
{
"post_id": 39,
"title": "Changelog: May 25, 2015",
"date_published": 1432511642.167,
"body": "- Version 0.3.0, rev187\n- Trusted authorization provider support: Easier multi-user sites by allowing site owners to define tusted third-party user certificate signers. (more info about it in the next days)\n- `--publish` option to siteSign to publish automatically after the new files signed.\n- `cryptSign` command line command to sign message using private key.\n- New, more stable OpenSSL layer that also works on OSX.\n- New json table format support.\n- DbCursor SELECT parameters bugfix.\n- Faster multi-threaded peer discovery from trackers.\n- New http trackers added.\n- Wait for dbschema.json file to execute query.\n- Handle json import errors.\n- More compact json writeJson storage command output.\n- Workaround to make non target=_top links work.\n- Cleaner UiWebsocket command router.\n- Notify other local users on local file changes.\n- Option to wait file download before execute query.\n- fileRules, certAdd, certSelect, certSet websocket API commands.\n- Allow more file errors on big sites.\n- On stucked downloads skip worker's current file instead of stopping it.\n- NoParallel parameter bugfix.\n- RateLimit interval bugfix.\n- Updater skips non-writeable files.\n- Try to close OpenSSL dll before update.\n\nZeroBlog:\n- Rewritten to use SQL database\n- Commenting on posts (**Please note: The comment publishing and distribution can be slow until most of the clients is not updated to version 0.3.0**)\n\n![comments](data/img/zeroblog-comments.png)\n\nZeroID\n- Sample Trusted authorization provider site with Bitmessage registration support\n\n![comments](data/img/zeroid.png)"
},
{
"post_id": 38,
"title": "Status report: Trusted authorization providers",
"date_published": 1431286381.226,
"body": "Currently working on a new feature that allows to create multi-user sites more easily. For example it will allows us to have comments on ZeroBlog (without contacting the site owner).\n\nCurrent status:\n\n - Sign/verification process: 90%\n - Sample trusted authorization provider site: 70%\n - ZeroBlog modifications: 30%\n - Authorization UI enhacements: 10%\n - Total progress: 60%\n \nEta.: 1-2weeks\n\n### Update: May 18, 2015:\n\nThings left:\n - More ZeroBlog modifications on commenting interface\n - Bitmessage support in Sample trusted authorization provider site\n - Test everything on multiple platform/browser and machine\n - Total progress: 80%\n\nIf no major flaw discovered it should be out this week."
},
{
"post_id": 37,
"title": "Changelog: May 3, 2015",
"date_published": 1430652299.794,
"body": " - rev134\n - Removed ZeroMQ dependencies and support (if you are on pre 0.2.0 version please, upgrade)\n - Save CPU and memory on file requests by streaming content directly to socket without loading to memory and encoding with msgpack.\n - Sites updates without re-download all content.json by querying the modified files from peers.\n - Fix urllib memory leak\n - SiteManager testsuite\n - Fix UiServer security testsuite\n - Announce to tracker on site resume\n\nZeroBoard:\n\n - Only last 100 messages loaded by default\n - Typo fix"
},
{
"post_id": 36,
"title": "Changelog: Apr 29, 2015",
"date_published": 1430388168.315,
"body": " - rev126\n - You can install the \"127.0.0.1:43110-less\" extension from [Chrome Web Store](https://chrome.google.com/webstore/detail/zeronet-protocol/cpkpdcdljfbnepgfejplkhdnopniieop). (thanks to g0ld3nrati0!)\n - You can disable the use of openssl using `--use_openssl False`\n - OpenSSL disabled on OSX because of possible segfault. You can enable it again using `zeronet.py --use_openssl True`,<br> please [give your feedback](https://github.com/HelloZeroNet/ZeroNet/issues/94)!\n - Update on non existent file bugfix\n - Save 20% memory using Python slots\n\n![Memory save](data/img/slots_memory.png)"
},
{
"post_id": 35,
"title": "Changelog: Apr 27, 2015",
"date_published": 1430180561.716,
"body": " - Revision 122\n - 40x faster signature verification by using OpenSSL if available\n - Added OpenSSL benchmark: beat my CPU at http://127.0.0.1:43110/Benchmark :)\n - Fixed UiServer socket memory leak"
},
{
"post_id": 34,
"title": "Slides about ZeroNet",
"date_published": 1430081791.43,
"body": "Topics:\n - ZeroNet cryptography\n - How site downloading works\n - Site updates\n - Multi-user sites\n - Current status of the project / Future plans\n\n<a href=\"https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000&slide=id.g9a1cce9ee_0_4\"><img src=\"data/img/slides.png\"/></a>\n\n[Any feedback is welcome!](http://127.0.0.1:43110/Talk.ZeroNetwork.bit/?Topic:18@2/Presentation+about+how+ZeroNet+works) \n\nThanks! :)"
},
{
"post_id": 33,
"title": "Changelog: Apr 24, 2014",
"date_published": 1429873756.187,
"body": " - Revision 120\n - Batched publishing to avoid update flood: Only send one update in every 7 seconds\n - Protection against update flood by adding update queue: Only allows 1 update in every 10 second for the same file\n - Fix stucked notification icon\n - Fix websocket error when writing to not-owned sites"
},
{
"post_id": 32,
"title": "Changelog: Apr 20, 2014",
"date_published": 1429572874,
"body": " - Revision 115\n - For faster pageload times allow browser cache on css/js/font files\n - Support for experimental chrome extension that allows to browse zeronet sites using `http://talk.zeronetwork.bit` and/or `http://zero/1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F`\n - Allow to browse memory content in /Stats\n - Peers uses Site's logger to save some memory\n - Give not-that-good peers on initial PEX if required\n - Allows more than one `--ui_restrict` ip address\n - Disable ssl monkey patching to avoid ssl error in Debian Jessie\n - Fixed websocket error when writing not-allowed files\n - Fixed bigsite file not found error\n - Fixed loading screen stays on screen even after index.html loaded\n\nZeroHello:\n\n - Site links converted to 127.0.0.1:43110 -less if using chrome extension\n\n![direct domains](data/img/direct_domains.png)"
},
{
"post_id": 31,
"title": "Changelog: Apr 17, 2014",
"date_published": 1429319617.201,
"body": " - Revision 101\n - Revision numbering between version\n - Allow passive publishing\n - Start Zeronet when Windows starts option to system tray icon\n - Add peer ping time to publish timeout\n - Passive connected peers always get the updates\n - Pex count bugfix\n - Changed the topright button hamburger utf8 character to more supported one and removed click anim\n - Passive peers only need 3 connection\n - Passive connection store on tracker bugfix\n - Not exits file bugfix\n - You can compare your computer speed (bitcoin crypto, sha512, sqlite access) to mine: http://127.0.0.1:43110/Benchmark :)\n\nZeroTalk:\n\n - Only quote the last message\n - Message height bugfix\n\nZeroHello:\n\n - Changed the burger icon to more supported one\n - Added revision display"
},
{
"post_id": 30,
"title": "Changelog: Apr 16, 2015",
"date_published": 1429135541.581,
"body": "Apr 15:\n\n - Version 0.2.9\n - To get rid of dead ips only send peers over pex that messaged within 2 hour\n - Only ask peers from 2 sources using pex every 20 min\n - Fixed mysterious notification icon disappearings\n - Mark peers as bad if publish is timed out (5s+)"
},
{
"post_id": 29,
"title": "Changelog: Apr 15, 2015",
"date_published": 1429060414.445,
"body": " - Sexy system tray icon with statistics instead of ugly console. (sorry, Windows only yet)\n - Total sent/received bytes stats\n - Faster connections and publishing by don't send passive peers using PEX and don't store them on trackers\n\n![Tray icon](data/img/trayicon.png)"
},
{
"post_id": 28,
"title": "Changelog: Apr 14, 2015",
"date_published": 1428973199.042,
"body": " - Experimental socks proxy support (Tested using Tor)\n - Tracker-less peer exchange between peers\n - Http bittorrent tracker support\n - Option to disable udp connections (udp tracker)\n - Other stability/security fixes\n\nTo use ZeroNet over Tor network start it with `zeronet.py --proxy 127.0.0.1:9050 --disable_udp`\n\nIt's still an experimental feature, there is lot work/fine tuning needed to make it work better and more secure (eg. by supporting hidden service peer addresses to allow connection between Tor clients). \nIn this mode you can only access to sites where there is at least one peer with peer exchange support. (client updated to latest commit)\n\nIf no more bug found i'm going to tag it as 0.2.9 in the next days."
},
{
"post_id": 27,
"title": "Changelog: Apr 9, 2015",
"date_published": 1428626164.266,
"body": " - Packaged windows dependencies for windows to make it easier to install: [ZeroBundle](https://github.com/HelloZeroNet/ZeroBundle)\n - ZeroName site downloaded at startup, so first .bit domain access is faster.\n - Fixed updater bug. (argh)"
},
{
"post_id": 26,
"title": "Changelog: Apr 7, 2015",
"date_published": 1428454413.286,
"body": " - Fix for big sites confirmation display\n - Total objects in memory stat\n - Memory optimizations\n - Retry bad files in every 20min\n - Load files to db when executing external siteSign command\n - Fix for endless reconnect bug\n \nZeroTalk:\n \n - Added experimental P2P new bot\n - Bumped size limit to 20k for every user :)\n - Reply button\n\nExperimenting/researching possibilities of i2p/tor support (probably using DHT)\n\nAny help/suggestion/idea greatly welcomed: [github issue](https://github.com/HelloZeroNet/ZeroNet/issues/60)"
},
{
"post_id": 25,
"title": "Changelog: Apr 2, 2015",
"date_published": 1428022346.555,
"body": " - Better passive mode by making sure to keep 5 active connections\n - Site connection and msgpack unpacker stats\n - No more sha1 hash added to content.json (it was only for backward compatibility with old clients)\n - Keep connection logger object to prevent some exception\n - Retry upnp port opening 3 times\n - Publish received content updates to more peers to make sure the better distribution\n\nZeroTalk: \n\n - Changed edit icon to more clear pencil\n - Single line breaks also breaks the line"
},
{
"post_id": 24,
"title": "Changelog: Mar 29, 2015",
"date_published": 1427758356.109,
"body": " - Version 0.2.8\n - Namecoin (.bit) domain support!\n - Possible to disable backward compatibility with old version to save some memory\n - Faster content publishing (commenting, posting etc.)\n - Display error on internal server errors\n - Better progress bar\n - Crash and bugfixes\n - Removed coppersurfer tracker (its down atm), added eddie4\n - Sorry, the auto updater broken for this version: please overwrite your current `update.py` file with the [latest one from github](https://raw.githubusercontent.com/HelloZeroNet/ZeroNet/master/update.py), run it and restart ZeroNet.\n - Fixed updater\n\n![domain](data/img/domain.png)\n\nZeroName\n\n - New site for resolving namecoin domains and display registered ones\n\n![ZeroName](data/img/zeroname.png)\nZeroHello\n\n - Automatically links to site's domain names if its specificed in content.json `domain` field\n\n"
},
{
"post_id": 22,
"title": "Changelog: Mar 23, 2015",
"date_published": 1427159576.994,
"body": " - Version 0.2.7\n - Plugin system: Allows extend ZeroNet without modify the core source\n - Comes with 3 plugin:\n - Multiuser: User login/logout based on BIP32 master seed, generate new master seed on visit (disabled by default to enable it just remove the disabled- from the directory name)\n - Stats: /Stats url moved to separate plugin for demonstration reasons\n - DonationMessage: Puts a little donation link to the bottom of every page (disabled by default)\n - Reworked module import system\n - Lazy user auth_address generatation\n - Allow to send prompt dialog to user from server-side\n - Update script remembers plugins enabled/disabled status\n - Multiline notifications\n - Cookie parser\n\nZeroHello in multiuser mode:\n\n - Logout button\n - Identicon generated based on logined user xpub address\n\n![Multiuser](data/img/multiuser.png)"
},
{
"post_id": 21,
"title": "Changelog: Mar 19, 2015",
"date_published": 1426818095.915,
"body": " - Version 0.2.6\n - SQL database support that allows easier site development and faster page load times\n - Updated [ZeroFrame API Reference](http://zeronet.readthedocs.org/en/latest/site_development/zeroframe_api_reference/)\n - Added description of new [dbschema.json](http://zeronet.readthedocs.org/en/latest/site_development/dbschema_json/) file\n - SiteStorage class for file operations\n - Incoming connection firstchar errorfix\n - dbRebuild and dbQuery commandline actions\n - [Goals donation page](http://zeronet.readthedocs.org/en/latest/zeronet_development/donate/)\n\nZeroTalk\n\n - Rewritten to use SQL queries (falls back nicely to use json files on older version)"
},
{
"post_id": 20,
"title": "Changelog: Mar 14, 2015",
"date_published": 1426386779.836,
"body": "\n - Save significant amount of memory by remove unused msgpack unpackers\n - Log unhandled exceptions\n - Connection checker error bugfix\n - Working on database support, you can follow the progress on [reddit](http://www.reddit.com/r/zeronet/comments/2yq7e8/a_json_caching_layer_for_quicker_development_and/)\n\n![memory usage](data/img/memory.png)"
},
{
"post_id": 19,
"title": "Changelog: Mar 10, 2015",
"date_published": 1426041044.008,
"body": " - Fixed ZeroBoard and ZeroTalk registration: It was down last days, sorry, I haven't tested it after recent modifications, but I promise I will from now :)\n - Working hard on documentations, after trying some possibilities, I chosen readthedocs.org: http://zeronet.readthedocs.org\n - The API reference is now up-to-date, documented demo sites working method and also updated other parts\n\n[Please, tell me what you want to see in the docs, Thanks!](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Topic:14@2/New+ZeroNet+documentation)"
},
{
"post_id": 18,
"title": "Changelog: Mar 8, 2015",
"date_published": 1425865493.306,
"body": " - [Better uPnp Puncher](https://github.com/HelloZeroNet/ZeroNet/blob/master/src/util/UpnpPunch.py), if you have problems with port opening please try this.\n\nZeroTalk: \n - Comment upvoting\n - Topic groups, if you know any other article about ZeroNet please, post [here](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Topics:8@2/Articles+about+ZeroNet)"
},
{
"post_id": 17,
"title": "Changelog: Mar 5, 2015",
"date_published": 1425606285.111,
"body": " - Connection pinging and timeout\n - Request timeout\n - Verify content at signing (size, allowed files)\n - Smarter coffeescript recompile\n - More detailed stats\n\nZeroTalk: \n - Topic upvote\n - Even more source code realign\n\n![ZeroTalk upvote](data/img/zerotalk-upvote.png)"
},
{
"post_id": 16,
"title": "Changelog: Mar 1, 2015",
"date_published": 1425259087.503,
"body": "ZeroTalk: \n - Reordered source code to allow more more feature in the future\n - Links starting with http://127.0.0.1:43110/ automatically converted to relative links (proxy support)\n - Comment reply (by clicking on comment's creation date)"
},
{
"post_id": 15,
"title": "Changelog: Feb 25, 2015",
"date_published": 1424913197.035,
"body": " - Version 0.2.5\n - Pure-python upnp port opener (Thanks to sirMackk!)\n - Site download progress bar\n - We are also on [Gitter chat](https://gitter.im/HelloZeroNet/ZeroNet)\n - More detailed connection statistics (ping, buff, idle, delay, sent, received)\n - First char failed bugfix\n - Webebsocket disconnect on slow connection bugfix\n - Faster site update\n\n![Progressbar](data/img/progressbar.png)\n\nZeroTalk: \n\n - Sort after 100ms idle\n - Colored usernames\n - Limit reload rate to 500ms\n\nZeroHello\n\n - [iframe render fps test](/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/render.html) ([more details on ZeroTalk](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Topic:7@2/Slow+rendering+in+Chrome))\n"
},
{
"post_id": 14,
"title": "Changelog: Feb 24, 2015",
"date_published": 1424734437.473,
"body": " - Version 0.2.4\n - New, experimental network code and protocol\n - peerPing and peerGetFile commands\n - Connection share and reuse between sites\n - Don't retry bad file more than 3 times in 20 min\n - Multi-threaded include file download\n - Really shuffle peers before publish\n - Simple internal stats page: http://127.0.0.1:43110/Stats\n - Publish bugfix for sites with more then 10 peers\n\n_If someone on very limited resources its recommended to wait some time until most of the peers is updates to new network code, because the backward compatibility is a little bit tricky and using more memory._"
},
{
"post_id": 13,
"title": "Changelog: Feb 19, 2015",
"date_published": 1424394659.345,
"body": " - Version 0.2.3\n - One click source code download from github, auto unpack and restart \n - Randomize peers before publish and work start\n - Switched to upnpc-shared.exe it has better virustotal reputation (4/53 vs 19/57)\n\n![Autoupdate](data/img/autoupdate.png)\n\nZeroTalk:\n\n - Topics also sorted by topic creation date\n\n_New content and file changes propagation is a bit broken yet. Already working on better network code that also allows passive content publishing. It will be out in 1-2 weeks._"
},
{
"post_id": 12,
"title": "Changelog: Feb 16, 2015",
"date_published": 1424134864.167,
"body": "Feb 16: \n - Version 0.2.2\n - LocalStorage support using WrapperAPI\n - Bugfix in user management\n\nZeroTalk: \n - Topics ordered by date of last post\n - Mark updated topics since your last visit\n\n![Mark](data/img/zerotalk-mark.png)"
},
{
"post_id": 11,
"title": "Changelog: Feb 14, 2015",
"date_published": 1423922572.778,
"body": " - Version 0.2.1\n - Site size limit: Default 10MB, asks permission to store more, test it here: [ZeroNet windows requirement](/1ZeroPYmW4BGwmT6Z54jwPgTWpbKXtTra)\n - Browser open wait until UiServer started\n - Peer numbers stored in sites.json for faster warmup\n - Silent WSGIHandler error\n - siteSetLimit WrapperAPI command\n - Grand ADMIN permission to wrapperframe\n\nZeroHello: \n\n - Site modify time also include sub-file changes (ZeroTalk last comment)\n - Better changetime date format"
},
{
"post_id": 10,
"title": "Changelog: Feb 11, 2015",
"date_published": 1423701015.643,
"body": "ZeroTalk:\n - Link-type posts\n - You can Edit or Delete your previous Comments and Topics\n - [Uploaded source code to github](https://github.com/HelloZeroNet/ZeroTalk)"
},
{
"post_id": 9,
"title": "Changelog: Feb 10, 2015",
"date_published": 1423532194.094,
"body": " - Progressive publish timeout based on file size\n - Better tracker error log\n - Viewport support in content.json and ZeroFrame API to allow better mobile device layout\n - Escape ZeroFrame notification messages to avoid js injection\n - Allow select all data in QueryJson\n\nZeroTalk:\n - Display topic's comment number and last comment time (requires ZeroNet today's commits from github)\n - Mobile device optimized layout"
},
{
"post_id": 8,
"title": "Changelog: Feb 9, 2015",
"date_published": 1423522387.728,
"body": " - Version 0.2.0\n - New bitcoin ECC lib (pybitcointools)\n - Hide notify errors\n - Include support for content.json\n - File permissions (signer address, filesize, allowed filenames)\n - Multisig ready, new, Bitcoincore compatible sign format\n - Faster, multi threaded content publishing\n - Multiuser, ready, BIP32 based site auth using bitcoin address/privatekey\n - Simple json file query language\n - Websocket api fileGet support\n\nZeroTalk: \n - [Decentralized forum demo](/1TaLk3zM7ZRskJvrh3ZNCDVGXvkJusPKQ/?Home)\n - Permission request/username registration\n - Everyone has an own file that he able to modify, sign and publish decentralized way, without contacting the site owner\n - Topic creation\n - Per topic commenting\n\n![ZeroTalk screenshot](data/img/zerotalk.png)"
},
{
"post_id": 7,
"title": "Changelog: Jan 29, 2015",
"date_published": 1422664081.662,
"body": "The default tracker (tracker.pomf.se) is down since yesterday and its resulting some warning messages. To make it disappear please update to latest version from [GitHub](https://github.com/HelloZeroNet/ZeroNet).\n\nZeroNet:\n- Added better tracker error handling\n- Updated alive [trackers list](https://github.com/HelloZeroNet/ZeroNet/blob/master/src/Site/SiteManager.py) (if anyone have more, please [let us know](http://www.reddit.com/r/zeronet/comments/2sgjsp/changelog/co5y07h))\n\nIf you want to stay updated about the project status: <br>\nWe have created a [@HelloZeronet](https://twitter.com/HelloZeroNet) Twitter account"
},
{
"post_id": 6,
"title": "Changelog: Jan 27, 2015",
"date_published": 1422394676.432,
"body": "ZeroNet\n* You can use `start.py` to start zeronet and open in browser automatically\n* Send timeout 50sec (workaround for some strange problems until we rewrite the network code without zeromq)\n* Reworked Websocket API to make it unified and allow named and unnamed parameters\n* Reload `content.json` when changed using fileWrite API command\n* Some typo fix\n\nZeroBlog\n* Allow edit post on mainpage\n* Also change blog title in `content.json` when modified using inline editor\n\nZeroHello\n* Update failed warning changed to No peers found when seeding own site."
},
{
"post_id": 4,
"title": "Changelog: Jan 25, 2015",
"date_published": 1422224700.583,
"body": "ZeroNet\n- Utf-8 site titles fixed\n- Changes in DebugMedia merger to allow faster, node.js based coffeescript compiler\n\nZeroBlog\n- Inline editor rewritten to simple textarea, so copy/paste, undo/redo now working correctly\n- Read more button to folded posts with `---`\n- ZeroBlog running in demo mode, so anyone can try the editing tools\n- Base html tag fixed\n- Markdown cheat-sheet\n- Confirmation if you want to close the browser tab while editing\n\nHow to update your running blog?\n- Backup your `content.json` and `data.json` files\n- Copy the files in the `data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8` directory to your site.\n"
},
{
"post_id": 3,
"title": "How to have a blog like this",
"date_published": 1422140400,
"body": "* Stop ZeroNet\n* Create a new site using `python zeronet.py siteCreate` command\n* Copy all file from **data/1BLogC9LN4oPDcruNz3qo1ysa133E9AGg8** to **data/[Your new site address displayed when executed siteCreate]** directory\n* Delete **data** directory and rename **data-default** to **data** to get a clean, empty site\n* Rename **data/users/content-default.json** file to **data/users/content.json**\n* Execute `zeronet.py siteSign [yoursiteaddress] --inner_path data/users/content.json` to sign commenting rules\n* Start ZeroNet\n* Add/Modify content\n* Click on the `Sign & Publish new content` button\n* Congratulations! Your site is ready to access.\n\n_Note: You have to start commands with `..\\python\\python zeronet.py...` if you downloaded ZeroBundle package_"
},
{
"post_id": 2,
"title": "Changelog: Jan 24, 2015",
"date_published": 1422105774.057,
"body": "* Version 0.1.6\n* Only serve .html files with wrapper frame\n* Http parameter support in url\n* Customizable background-color for wrapper in content.json\n* New Websocket API commands (only allowed on own sites):\n - fileWrite: Modify site's files in hdd from javascript\n - sitePublish: Sign new content and Publish to peers\n* Prompt value support in ZeroFrame (used for prompting privatekey for publishing in ZeroBlog)\n\n---\n\n## Previous changes:\n\n### Jan 20, 2014\n- Version 0.1.5\n- Detect computer wakeup from sleep and acts as startup (check open port, site changes)\n- Announce interval changed from 10min to 20min\n- Delete site files command support\n- Stop unfinished downloads on pause, delete\n- Confirm dialog support to WrapperApi\n\nZeroHello\n- Site Delete menuitem\n- Browser back button doesn't jumps to top\n\n### Jan 19, 2014:\n- Version 0.1.4\n- WIF compatible new private addresses\n- Proper bitcoin address verification, vanity address support: http://127.0.0.1:43110/1ZEro9ZwiZeEveFhcnubFLiN3v7tDL4bz\n- No hash error on worker kill\n- Have you secured your private key? confirmation\n\n### Jan 18, 2014:\n- Version 0.1.3\n- content.json hashing changed from sha1 to sha512 (trimmed to 256bits) for better security, keep hasing to sha1 for backward compatiblility yet\n- Fixed fileserver_port argument parsing\n- Try to ping peer before asking any command if no communication for 20min\n- Ping timeout / retry\n- Reduce websocket bw usage\n- Separate wrapper_key for websocket auth and auth_key to identify user\n- Removed unnecessary from wrapper iframe url\n\nZeroHello:\n- Compatiblilty with 0.1.3 websocket changes while maintaining backward compatibility\n- Better error report on file update fail\n\nZeroBoard:\n- Support for sha512 hashed auth_key, but keeping md5 key support for older versions yet\n\n### Jan 17, 2014:\n- Version 0.1.2\n- Better error message logging\n- Kill workers on download done\n- Retry on socket error\n- Timestamping console messages\n\n### Jan 16:\n- Version to 0.1.1\n- Version info to websocket api\n- Add publisher's zeronet version to content.json\n- Still chasing network publish problems, added more debug info\n\nZeroHello:\n- Your and the latest ZeroNet version added to top right corner (please update if you dont see it)\n"
},
{
"post_id": 1,
"title": "ZeroBlog features",
"date_published": 1422105061,
"body": "Initial version (Jan 24, 2014):\n\n* Site avatar generated by site address\n* Distraction-free inline edit: Post title, date, body, Site title, description, links\n* Post format using [markdown](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet)\n* Code block [syntax highlight](#code-highlight-demos) using [highlight.js](https://highlightjs.org/)\n* Create & Delete post\n* Sign & Publish from web\n* Fold blog post: Content after first `---` won't appear at listing\n* Shareable, friendly post urls\n\n\nTodo:\n\n* ~~Better content editor (contenteditable seemed like a good idea, but tricky support of copy/paste makes it more pain than gain)~~\n* Image upload to post & blog avatar\n* Paging\n* Searching\n* ~~Quick cheat-sheet using markdown~~\n\n---\n\n## Code highlight demos\n### Server-side site publishing (UiWebsocket.py):\n```py\ndef actionSitePublish(self, to, params):\n\tsite = self.site\n\tif not site.settings[\"own\"]: return self.response(to, \"Forbidden, you can only modify your own sites\")\n\n\t# Signing\n\tsite.loadContent(True) # Reload content.json, ignore errors to make it up-to-date\n\tsigned = site.signContent(params[0]) # Sign using private key sent by user\n\tif signed:\n\t\tself.cmd(\"notification\", [\"done\", \"Private key correct, site signed!\", 5000]) # Display message for 5 sec\n\telse:\n\t\tself.cmd(\"notification\", [\"error\", \"Site sign failed: invalid private key.\"])\n\t\tself.response(to, \"Site sign failed\")\n\t\treturn\n\tsite.loadContent(True) # Load new content.json, ignore errors\n\n\t# Publishing\n\tif not site.settings[\"serving\"]: # Enable site if paused\n\t\tsite.settings[\"serving\"] = True\n\t\tsite.saveSettings()\n\t\tsite.announce()\n\n\tpublished = site.publish(5) # Publish to 5 peer\n\n\tif published>0: # Successfuly published\n\t\tself.cmd(\"notification\", [\"done\", \"Site published to %s peers.\" % published, 5000])\n\t\tself.response(to, \"ok\")\n\t\tsite.updateWebsocket() # Send updated site data to local websocket clients\n\telse:\n\t\tif len(site.peers) == 0:\n\t\t\tself.cmd(\"notification\", [\"info\", \"No peers found, but your site is ready to access.\"])\n\t\t\tself.response(to, \"No peers found, but your site is ready to access.\")\n\t\telse:\n\t\t\tself.cmd(\"notification\", [\"error\", \"Site publish failed.\"])\n\t\t\tself.response(to, \"Site publish failed.\")\n```\n\n\n### Client-side site publish (ZeroBlog.coffee)\n```coffee\n# Sign and Publish site\npublish: =>\n\tif not @server_info.ip_external # No port open\n\t\t@cmd \"wrapperNotification\", [\"error\", \"To publish the site please open port <b>#{@server_info.fileserver_port}</b> on your router\"]\n\t\treturn false\n\t@cmd \"wrapperPrompt\", [\"Enter your private key:\", \"password\"], (privatekey) => # Prompt the private key\n\t\t$(\".publishbar .button\").addClass(\"loading\")\n\t\t@cmd \"sitePublish\", [privatekey], (res) =>\n\t\t\t$(\".publishbar .button\").removeClass(\"loading\")\n\t\t\t@log \"Publish result:\", res\n\n\treturn false # Ignore link default event\n```\n\n"
}
]
}

Binary file not shown (new file; size: 24 KiB)

Binary file not shown (new file; size: 16 KiB)

Binary file not shown (new file; size: 12 KiB)

Binary file not shown (new file; size: 12 KiB)

Binary file not shown (new file; size: 29 KiB)

Binary file not shown (new file; size: 13 KiB)

Binary file not shown (new file; size: 14 KiB)

Binary file not shown (new file; size: 9.3 KiB)

Binary file not shown (new file; size: 19 KiB)

Binary file not shown (new file; size: 23 KiB)

Binary file not shown (new file; size: 18 KiB)

Binary file not shown (new file; size: 35 KiB)

Binary file not shown (new file; size: 44 KiB)

Binary file not shown (new file; size: 40 KiB)

Binary file not shown (new file; size: 26 KiB)

View File

@ -0,0 +1 @@
hello!

View File

@ -0,0 +1,14 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"files": {
"data.json": {
"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906",
"size": 505
}
},
"inner_path": "data/test_include/content.json",
"modified": 1470340816.513,
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "GxF2ZD0DaMx+CuxafnnRx+IkWTrXubcmTHaJIPyemFpzCvbSo6DyjstN8T3qngFhYIZI/MkcG4ogStG0PLv6p3w="
}
}
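
The "files" map above records, for each file covered by this content.json, its byte size and a sha512 digest trimmed to 256 bits (64 hex chars, the scheme described in the Jan 18 changelog entry earlier in this commit). A minimal, illustrative checker under that assumption; the path is the test file above, the helper itself is not part of the uploaded sources:

import hashlib
import json
import os

def check_files(content_path):
    # Verify size and truncated-sha512 digest of every file listed in a content.json
    base_dir = os.path.dirname(content_path)
    content = json.load(open(content_path))
    for rel_path, info in content["files"].items():
        data = open(os.path.join(base_dir, rel_path), "rb").read()
        digest = hashlib.sha512(data).hexdigest()[:64]  # sha512 trimmed to 256 bits
        ok = len(data) == info["size"] and digest == info["sha512"]
        print(rel_path, "ok" if ok else "changed")

check_files("data/test_include/content.json")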

View File

@ -0,0 +1,37 @@
{
"next_topic_id": 1,
"topics": [],
"next_message_id": 5,
"comments": {
"1@2": [
{
"comment_id": 1,
"body": "New user test!",
"added": 1423442049
},
{
"comment_id": 2,
"body": "test 321",
"added": 1423531445
},
{
"comment_id": 3,
"body": "0.2.4 test.",
"added": 1424133003
}
]
},
"topic_votes": {
"1@2": 1,
"1@6": 1,
"1@69": 1,
"607@69": 1
},
"comment_votes": {
"35@2": 1,
"7@64": 1,
"8@64": 1,
"50@2": 1,
"13@77": 1
}
}
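
The "1@2"-style keys in topic_votes and comment_votes above appear to be compound URIs: an item id, "@", then the numeric id of the user data file the item lives in (the json_id that the db schema later in this commit references). A tiny parser sketch under that assumption, purely illustrative:

def parse_uri(uri):
    # e.g. "607@69" -> (607, 69): item id plus the id of the data file it belongs to
    item_id, json_id = uri.split("@")
    return int(item_id), int(json_id)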

View File

@ -0,0 +1,15 @@
{
"cert_auth_type": "web",
"cert_sign": "G4YB7y749GI6mJboyI7cNNfyMwOS0rcVXLmgq8qmCC4TCaRqup3TGWm8hzeru7+B5iXhq19Ruz286bNVKgNbnwU=",
"cert_user_id": "newzeroid@zeroid.bit",
"files": {
"data.json": {
"sha512": "2378ef20379f1db0c3e2a803bfbfda2b68515968b7e311ccc604406168969d34",
"size": 161
}
},
"modified": 1432554679.913,
"signs": {
"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": "GzX/Ht6ms1dOnqB3kVENvDnxpH+mqA0Zlg3hWy0iwgxpyxWcA4zgmwxcEH41BN9RrvCaxgSd2m1SG1/8qbQPzDY="
}
}

View File

@ -0,0 +1,12 @@
{
"next_comment_id": 2,
"comment": [
{
"comment_id": 1,
"body": "Test me!",
"post_id": 40,
"date_added": 1432554679
}
],
"comment_vote": {}
}

View File

@ -0,0 +1,24 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"cert_auth_type": "web",
"cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=",
"cert_user_id": "toruser@zeroid.bit",
"files": {
"data.json": {
"sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45",
"size": 168
}
},
"files_optional": {
"peanut-butter-jelly-time.gif": {
"sha512": "a238fd27bda2a06f07f9f246954b34dcf82e6472aebdecc2c5dc1f01a50721ef",
"size": 1606
}
},
"inner_path": "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json",
"modified": 1470340817.676,
"optional": ".*\\.(jpg|png|gif)",
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G6UOG3ne1hVe3mDGXHnWX8A1vKzH0XHD6LGMsshvNFVXGn003IFNLUL9dlb3XXJf3tyJGZncvGobzNpwBib08QY="
}
}

View File

@ -0,0 +1,12 @@
{
"next_comment_id": 2,
"comment": [
{
"comment_id": 1,
"body": "hello from Tor!",
"post_id": 38,
"date_added": 1432491109
}
],
"comment_vote": {}
}

View File

@ -0,0 +1,17 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"cert_auth_type": "web",
"cert_sign": "HBsTrjTmv+zD1iY93tSci8n9DqdEtYwzxJmRppn4/b+RYktcANGm5tXPOb+Duw3AJcgWDcGUvQVgN1D9QAwIlCw=",
"cert_user_id": "toruser@zeroid.bit",
"files": {
"data.json": {
"sha512": "4868b5e6d70a55d137db71c2e276bda80437e0235ac670962acc238071296b45",
"size": 168
}
},
"inner_path": "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json",
"modified": 1470340818.389,
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G6oCzql6KWKAq2aSmZ1pm4SqvwL3e3LRdWxsvILrDc6VWpGZmVgbNn5qW18bA7fewhtA/oKc5+yYjGlTLLOWrB4="
}
}

View File

@ -0,0 +1,12 @@
{
"next_comment_id": 2,
"comment": [
{
"comment_id": 1,
"body": "hello from Tor!",
"post_id": 38,
"date_added": 1432491109
}
],
"comment_vote": {}
}

View File

@ -0,0 +1,30 @@
{
"address": "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT",
"files": {},
"ignore": ".*",
"inner_path": "data/users/content.json",
"modified": 1470340815.228,
"signs": {
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G25hsrlyTOy8PHKuovKDRC7puoBj/OLIZ3U4OJ01izkhE1BBQ+TOgxX96+HXoZGme2/P4IdEnYjc1rqIZ6O+nFk="
},
"user_contents": {
"cert_signers": {
"zeroid.bit": [ "1iD5ZQJMNXu43w1qLB8sfdHVKppVMduGz" ]
},
"permission_rules": {
".*": {
"files_allowed": "data.json",
"files_allowed_optional": ".*\\.(png|jpg|gif)",
"max_size": 10000,
"max_size_optional": 10000000,
"signers": [ "14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" ]
},
"bitid/.*@zeroid.bit": { "max_size": 40000 },
"bitmsg/.*@zeroid.bit": { "max_size": 15000 }
},
"permissions": {
"bad@zeroid.bit": false,
"nofish@zeroid.bit": { "max_size": 100000 }
}
}
}
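
The user_contents block above works as an access-control table: cert_signers names the keys allowed to issue certificates for a domain, each permission_rules key is regex-matched against a visitor's cert_user_id, and permissions holds per-user overrides (false meaning banned). A minimal resolution sketch under those assumptions; real clients may merge matching rules differently (for example by taking the maximum of numeric limits):

import re

def resolve_rules(user_contents, cert_user_id):
    override = user_contents["permissions"].get(cert_user_id)
    if override is False:
        return None  # banned user
    rules = {}
    for pattern, rule in user_contents["permission_rules"].items():
        if re.match(pattern, cert_user_id):
            rules.update(rule)  # simplification: later matching rules win per key
    if override:
        rules.update(override)  # per-user override beats the generic rules
    return rules

# resolve_rules(user_contents, "nofish@zeroid.bit")["max_size"] -> 100000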

View File

@ -0,0 +1,54 @@
{
"db_name": "ZeroBlog",
"db_file": "data/zeroblog.db",
"version": 2,
"maps": {
"users/.+/data.json": {
"to_table": [
"comment",
{"node": "comment_vote", "table": "comment_vote", "key_col": "comment_uri", "val_col": "vote"}
]
},
"users/.+/content.json": {
"to_keyvalue": [ "cert_user_id" ]
},
"data.json": {
"to_table": [ "post" ],
"to_keyvalue": [ "title", "description", "links", "next_post_id", "demo", "modified" ]
}
},
"tables": {
"comment": {
"cols": [
["comment_id", "INTEGER"],
["post_id", "INTEGER"],
["body", "TEXT"],
["date_added", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX comment_key ON comment(json_id, comment_id)", "CREATE INDEX comment_post_id ON comment(post_id)"],
"schema_changed": 1426195823
},
"comment_vote": {
"cols": [
["comment_uri", "TEXT"],
["vote", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE INDEX comment_vote_comment_uri ON comment_vote(comment_uri)", "CREATE INDEX comment_vote_json_id ON comment_vote(json_id)"],
"schema_changed": 1426195822
},
"post": {
"cols": [
["post_id", "INTEGER"],
["title", "TEXT"],
["body", "TEXT"],
["date_published", "INTEGER"],
["json_id", "INTEGER REFERENCES json (json_id)"]
],
"indexes": ["CREATE UNIQUE INDEX post_uri ON post(json_id, post_id)", "CREATE INDEX post_id ON post(post_id)"],
"schema_changed": 1426195823
}
}
}
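
Given this schema, the db layer loads each json file matching a maps pattern into the listed tables and keys every row with a json_id pointing into its json bookkeeping table, so data spread across many user files can be queried with plain SQL. A small sqlite3 sketch against the resulting database (db_file, table and column names are from the schema above; the json table itself is maintained by the db layer):

import sqlite3

db = sqlite3.connect("data/zeroblog.db")
db.row_factory = sqlite3.Row

# All comments of post 23, newest first (served by the comment_post_id index)
rows = db.execute(
    "SELECT comment_id, body, date_added FROM comment"
    " WHERE post_id = ? ORDER BY date_added DESC", (23,)
).fetchall()
for row in rows:
    print(row["comment_id"], row["body"])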

Binary file not shown (new file; size: 723 B)

View File

@ -0,0 +1,137 @@
<!DOCTYPE html>
<html>
<head>
<title>ZeroBlog Demo</title>
<meta charset="utf-8">
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<link rel="stylesheet" href="css/all.css" />
<base href="" target="_top" id="base">
<script>base.href = document.location.href.replace("/media", "").replace("index.html", "").replace(/[&?]wrapper=False/, "") // Make hashtags work</script>
</head>
<body>
<!-- editbar -->
<div class="editbar bottombar">
<ul class="markdown-help">
<li># H1</li>
<li>## H2</li>
<li>### H3</li>
<li><i>_italic_</i></li>
<li><b>**bold**</b></li>
<li>~~<s>strikethrough</s>~~</li>
<li>- Lists</li>
<li>1. Numbered lists</li>
<li>[Links](http://www.zeronet.io)</li>
<li>[References][1]<br>[1]: Can be used</li>
<li>![image alt](img/logo.png)</li>
<li>Inline <code>`code`</code></li>
<li><code>```python<br>print "Code block"<br>```</code></li>
<li>&gt; Quotes</li>
<li>--- Horizontal rule</li>
</ul>
<a href="#Markdown+help" class="icon-help">?</a> Editing: <span class="object">Post:21.body</span> <a href="#Save" class="button save">Save</a> <a href="#Delete" class="button button-delete button-outline delete">Delete</a> <a href="#Cancel" class="cancel">Cancel</a>
</div>
<!-- EOF editbar -->
<!-- publishbar -->
<div class="publishbar bottombar">
<small>Content changed</small> <a href="#Publish" class="button button-outline button-ok publish">Sign &amp; Publish new content</a>
</div>
<!-- EOF publishbar -->
<!-- left -->
<div class="left" data-object="Site">
<a href="?Home" class="nolink"><div class="avatar"> </div></a>
<h1><a href="?Home" class="nolink" data-editable="title" data-editable-mode="simple"></a></h1>
<h2 data-editable="description"></h2>
<hr>
<div class="links" data-editable="links">
</div>
</div>
<!-- EOF left -->
<!-- right -->
<div class="right">
<!-- Post listing -->
<div class="posts">
<a href="#New+Post" class="button button-outline new">Add new post</a>
<!-- Template: post -->
<div class="post template" data-object="Post:23" data-deletable="True">
<h1 class="title"><a href="?Post:23:Title" data-editable="title" data-editable-mode="simple" class="editable">Title</a></h1>
<div class="details">
<span class="published" data-editable="date_published" data-editable-mode="timestamp">21 hours ago &middot; 2 min read</span>
<a href="?Post:23:title" class="comments-num">&middot; <div class='icon-comment'></div> <span class="num">3 comments</span></a>
</div>
<div class="body" data-editable="body">Body</div>
<a class="more" href="#"><span class='readmore'>Read more</span></a>
</div>
<!-- EOF Template: post -->
</div>
<!-- EOF Post listing -->
<!-- Single Post show -->
<div class="post post-full" data-object="Post:23" data-deletable="True">
<h1 class="title"><a href="?Post:23:Title" data-editable="title" data-editable-mode="simple" class="editable">Title</a></h1>
<div class="details"> <span class="published" data-editable="date_published" data-editable-mode="timestamp">21 hours ago &middot; 2 min read</span> </div>
<div class="body" data-editable="body"></div>
<h2 id="Comments"><span class="comments-num">0</span> Comments:</h2>
<!-- New comment -->
<div class="comment comment-new">
<div class="info">
<a class="user_name certselect" href="#Change+user" title='Change user'>Please sign in</a>
&#9473;
<span class="added">new comment</span>
</div>
<div class="comment-body">
<a class="button button-submit button-certselect certselect" href="#Change+user"><div class='icon-profile'></div>Sign in as...</a>
<textarea class="comment-textarea"></textarea>
<a href="#Submit+comment" class="button button-submit button-submit-comment">Submit comment</a>
<div style='float: right; margin-top: -6px'>
<div class="user-size user-size-used"></div>
<div class="user-size"></div>
</div>
<div style="clear: both"></div>
</div>
</div>
<!-- EOF New comment -->
<div class="comments">
<!-- Template: Comment -->
<div class="comment template">
<div class="info">
<span class="user_name">user_name</span>
<!--<span class="cert_domain"></span>-->
&#9473;
<span class="added">1 day ago</span>
<a href="#Reply" class="reply"><div class="icon icon-reply"></div> <span class="reply-text">Reply</span></a>
</div>
<div class="comment-body">Body</div>
</div>
<!-- EOF Template: Comment -->
</div>
</div>
<!-- EOF Single Post show -->
</div>
<!-- EOF right -->
<div style="clear: both"></div>
<script type="text/javascript" src="js/all.js" async></script>
</body>
</html>

File diff suppressed because one or more lines are too long

309
src/Tor/TorManager.py Normal file
View File

@ -0,0 +1,309 @@
import logging
import re
import socket
import binascii
import sys
import os
import time
import random
import subprocess
import atexit
import gevent
from Config import config
from Crypt import CryptRsa
from Crypt import CryptEd25519
from Site import SiteManager
import socks
from gevent.lock import RLock
from Debug import Debug
from Plugin import PluginManager
@PluginManager.acceptPlugins
class TorManager(object):
def __init__(self, fileserver_ip=None, fileserver_port=None):
self.privatekeys = {} # Onion: Privatekey
self.site_onions = {} # Site address: Onion
self.tor_exe = "tools/tor/tor.exe"
self.has_meek_bridges = os.path.isfile("tools/tor/PluggableTransports/meek-client.exe")
self.tor_process = None
self.log = logging.getLogger("TorManager")
self.start_onions = None
self.conn = None
self.lock = RLock()
self.starting = True
self.connecting = True
self.status = None
self.event_started = gevent.event.AsyncResult()
if config.tor == "disable":
self.enabled = False
self.start_onions = False
self.setStatus("Disabled")
else:
self.enabled = True
self.setStatus("Waiting")
if fileserver_port:
self.fileserver_port = fileserver_port
else:
self.fileserver_port = config.fileserver_port
self.ip, self.port = config.tor_controller.rsplit(":", 1)
self.port = int(self.port)
self.proxy_ip, self.proxy_port = config.tor_proxy.rsplit(":", 1)
self.proxy_port = int(self.proxy_port)
def start(self):
self.log.debug("Starting (Tor: %s)" % config.tor)
self.starting = True
try:
if not self.connect():
raise Exception(self.status)
self.log.debug("Tor proxy port %s check ok" % config.tor_proxy)
except Exception as err:
if sys.platform.startswith("win") and os.path.isfile(self.tor_exe):
self.log.info("Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err))
# Change to self-bundled Tor ports
self.port = 49051
self.proxy_port = 49050
if config.tor == "always":
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port)
self.enabled = True
if not self.connect():
self.startTor()
else:
self.log.info("Disabling Tor, because error while accessing Tor proxy at port %s: %s" % (config.tor_proxy, err))
self.enabled = False
def setStatus(self, status):
self.status = status
if "main" in sys.modules: # import main has side-effects, breaks tests
import main
if "ui_server" in dir(main):
main.ui_server.updateWebsocket()
def startTor(self):
if sys.platform.startswith("win"):
try:
self.log.info("Starting Tor client %s..." % self.tor_exe)
tor_dir = os.path.dirname(self.tor_exe)
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
cmd = r"%s -f torrc --defaults-torrc torrc-defaults --ignore-missing-torrc" % self.tor_exe
if config.tor_use_bridges:
cmd += " --UseBridges 1"
self.tor_process = subprocess.Popen(cmd, cwd=tor_dir, close_fds=True, startupinfo=startupinfo)
for wait in range(1, 3): # Wait for startup
time.sleep(wait * 0.5)
self.enabled = True
if self.connect():
if self.isSubprocessRunning():
self.request("TAKEOWNERSHIP") # Shut down Tor client when controll connection closed
break
# Terminate on exit
atexit.register(self.stopTor)
except Exception as err:
self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err)))
self.enabled = False
self.starting = False
self.event_started.set(False)
return False
def isSubprocessRunning(self):
return self.tor_process and self.tor_process.pid and self.tor_process.poll() is None
def stopTor(self):
self.log.debug("Stopping...")
try:
if self.isSubprocessRunning():
self.request("SIGNAL SHUTDOWN")
except Exception as err:
self.log.error("Error stopping Tor: %s" % err)
def connect(self):
if not self.enabled:
return False
self.site_onions = {}
self.privatekeys = {}
return self.connectController()
def connectController(self):
if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one
conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
else:
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.log.debug("Connecting to Tor Controller %s:%s" % (self.ip, self.port))
self.connecting = True
try:
with self.lock:
conn.connect((self.ip, self.port))
# Auth cookie file
res_protocol = self.send("PROTOCOLINFO", conn)
cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol)
if config.tor_password:
res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn)
elif cookie_match:
cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape")
if not os.path.isfile(cookie_file) and self.tor_process:
# Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57)
cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie"
auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read())
res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn)
else:
res_auth = self.send("AUTHENTICATE", conn)
if "250 OK" not in res_auth:
raise Exception("Authenticate error %s" % res_auth)
                # Version 0.2.7.5 required because of ADD_ONION support
res_version = self.send("GETINFO version", conn)
version = re.search(r'version=([0-9\.]+)', res_version).group(1)
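                # "0.2.7.5".replace(".", "0", 2) -> "00207.5", so the float
                # comparison below effectively checks the first three version fields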
if float(version.replace(".", "0", 2)) < 207.5:
raise Exception("Tor version >=0.2.7.5 required, found: %s" % version)
self.setStatus("Connected (%s)" % res_auth)
self.event_started.set(True)
self.starting = False
self.connecting = False
self.conn = conn
except Exception as err:
self.conn = None
self.setStatus("Error (%s)" % str(err))
self.log.warning("Tor controller connect error: %s" % Debug.formatException(str(err)))
self.enabled = False
return self.conn
def disconnect(self):
if self.conn:
self.conn.close()
self.conn = None
def startOnions(self):
if self.enabled:
self.log.debug("Start onions")
self.start_onions = True
self.getOnion("global")
# Get new exit node ip
def resetCircuits(self):
res = self.request("SIGNAL NEWNYM")
if "250 OK" not in res:
self.setStatus("Reset circuits error (%s)" % res)
self.log.error("Tor reset circuits error: %s" % res)
def addOnion(self):
if len(self.privatekeys) >= config.tor_hs_limit:
return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")])
result = self.makeOnionAndKey()
if result:
onion_address, onion_privatekey = result
self.privatekeys[onion_address] = onion_privatekey
self.setStatus("OK (%s onions running)" % len(self.privatekeys))
SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port))
return onion_address
else:
return False
def makeOnionAndKey(self):
res = self.request("ADD_ONION NEW:ED25519-V3 port=%s" % self.fileserver_port)
match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=ED25519-V3:(.*?)[\r\n]", res, re.DOTALL)
if match:
onion_address, onion_privatekey = match.groups()
return (onion_address, onion_privatekey)
else:
self.setStatus("AddOnion error (%s)" % res)
self.log.error("Tor addOnion error: %s" % res)
return False
def delOnion(self, address):
res = self.request("DEL_ONION %s" % address)
if "250 OK" in res:
del self.privatekeys[address]
self.setStatus("OK (%s onion running)" % len(self.privatekeys))
return True
else:
self.setStatus("DelOnion error (%s)" % res)
self.log.error("Tor delOnion error: %s" % res)
self.disconnect()
return False
def request(self, cmd):
with self.lock:
if not self.enabled:
return False
if not self.conn:
if not self.connect():
return ""
return self.send(cmd)
def send(self, cmd, conn=None):
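        # Control-port framing: commands are CRLF-terminated and a successful
        # reply ends with a "250 OK" line, which is what the read loop waits for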
if not conn:
conn = self.conn
self.log.debug("> %s" % cmd)
back = ""
for retry in range(2):
try:
conn.sendall(b"%s\r\n" % cmd.encode("utf8"))
while not back.endswith("250 OK\r\n"):
back += conn.recv(1024 * 64).decode("utf8")
break
except Exception as err:
self.log.error("Tor send error: %s, reconnecting..." % err)
if not self.connecting:
self.disconnect()
time.sleep(1)
self.connect()
back = None
if back:
self.log.debug("< %s" % back.strip())
return back
def getPrivatekey(self, address):
return self.privatekeys[address]
def getPublickey(self, address):
return CryptRsa.privatekeyToPublickey(self.privatekeys[address])
def getOnion(self, site_address):
if not self.enabled:
return None
if config.tor == "always": # Different onion for every site
onion = self.site_onions.get(site_address)
else: # Same onion for every site
onion = self.site_onions.get("global")
site_address = "global"
if not onion:
with self.lock:
self.site_onions[site_address] = self.addOnion()
onion = self.site_onions[site_address]
self.log.debug("Created new hidden service for %s: %s" % (site_address, onion))
return onion
    # Creates and returns a socket set up to connect through the Tor network
def createSocket(self, onion, port):
if not self.enabled:
return False
self.log.debug("Creating new Tor socket to %s:%s" % (onion, port))
if self.starting:
self.log.debug("Waiting for startup...")
self.event_started.get()
if config.tor == "always": # Every socket is proxied by default, in this mode
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
else:
sock = socks.socksocket()
sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port)
return sock

1
src/Tor/__init__.py Normal file
View File

@ -0,0 +1 @@
from .TorManager import TorManager
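
For orientation, a minimal usage sketch of the manager above; it assumes a parsed config and a reachable Tor controller (or the bundled tor.exe on Windows) and is illustrative rather than part of the uploaded sources:

from Config import config
config.parse()

from Tor import TorManager

tor = TorManager(fileserver_port=config.fileserver_port)
tor.start()  # connect to the controller, falling back to the bundled client on Windows
onion = tor.getOnion("global")  # one shared hidden service unless config.tor == "always"
sock = tor.createSocket(onion + ".onion", tor.fileserver_port)  # socket set up to dial via the SOCKS proxy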