# ZeroNet/src/Test/conftest.py

import os
import sys
import urllib.request
import time
import logging
import json
import shutil
import gc
import datetime
import atexit
import threading

import pytest
import mock

import gevent
if "libev" not in str(gevent.config.loop):
    # Workaround for a random crash when libuv is used with threads
    gevent.config.loop = "libev-cext"

import gevent.event
from gevent import monkey
monkey.patch_all(thread=False, subprocess=False)

atexit_register = atexit.register
atexit.register = lambda func: ""  # Don't register shutdown functions to avoid IO error on exit


def pytest_addoption(parser):
    parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests")


def pytest_collection_modifyitems(config, items):
    if config.getoption("--slow"):
        # --slow given on the command line: do not skip slow tests
        return
    skip_slow = pytest.mark.skip(reason="need --slow option to run")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)
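
# A test opts into the slow group with the standard pytest marker, e.g. (test name illustrative):
#   @pytest.mark.slow
#   def testBigfileDownload(file_server, site):
#       ...
# Such tests are collected but skipped unless pytest is started with --slow.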


# Config
if sys.platform == "win32":
    CHROMEDRIVER_PATH = "tools/chrome/chromedriver.exe"
else:
    CHROMEDRIVER_PATH = "chromedriver"
SITE_URL = "http://127.0.0.1:43110"

TEST_DATA_PATH = 'src/Test/testdata'
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../lib"))  # External modules directory
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/.."))  # Imports relative to src dir

from Config import config

config.argv = ["none"]  # Don't pass any argv to the config parser
config.parse(silent=True, parse_config=False)  # Plugins need to access the configuration
config.action = "test"

# Load plugins
from Plugin import PluginManager

config.data_dir = TEST_DATA_PATH  # Use test data for unittests
config.debug = True

os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../.."))  # Set working dir

all_loaded = PluginManager.plugin_manager.loadPlugins()
assert all_loaded, "Not all plugins loaded successfully"

config.loadPlugins()
config.parse(parse_config=False)  # Parse again to add plugin configuration options

config.action = "test"
config.debug = True
config.debug_socket = True  # Print socket debug messages
config.verbose = True  # More verbose logging
config.tor = "disable"  # Don't start the Tor client
config.trackers = []
config.data_dir = TEST_DATA_PATH  # Use test data for unittests
if "ZERONET_LOG_DIR" in os.environ:
    config.log_dir = os.environ["ZERONET_LOG_DIR"]
config.initLogging(console_logging=False)
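
# The test log directory can be redirected per run, e.g. (invocation illustrative):
#   ZERONET_LOG_DIR=/tmp/zeronet-test-logs pytest src/Test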

# Set a custom formatter with relative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
time_start = time.time()


class TimeFilter(logging.Filter):
    def __init__(self, *args, **kwargs):
        self.time_last = time.time()
        self.main_thread_id = threading.current_thread().ident
        super().__init__(*args, **kwargs)

    def filter(self, record):
        if threading.current_thread().ident != self.main_thread_id:
            record.thread_marker = "T"
            record.thread_title = "(Thread#%s)" % threading.current_thread().ident
        else:
            record.thread_marker = " "
            record.thread_title = ""

        since_last = time.time() - self.time_last
        if since_last > 0.1:
            line_marker = "!"
        elif since_last > 0.02:
            line_marker = "*"
        elif since_last > 0.01:
            line_marker = "-"
        else:
            line_marker = " "

        since_start = time.time() - time_start
        record.since_start = "%s%.3fs" % (line_marker, since_start)

        self.time_last = time.time()
        return True


log = logging.getLogger()
fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s')
for hndl in log.handlers:
    hndl.addFilter(TimeFilter())
    hndl.setFormatter(fmt)
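
# With this formatter a record looks roughly like (values illustrative):
#   !0.142s T WARNING  ConnServer Connection error (Thread#140123456789)
# where "!", "*" and "-" flag records logged more than 100 ms, 20 ms and 10 ms
# after the previous one, and "T" marks records from non-main threads.
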
from Site.Site import Site
from Site import SiteManager
from User import UserManager
from File import FileServer
from Connection import ConnectionServer
from Crypt import CryptConnection
from Crypt import CryptBitcoin
from Ui import UiWebsocket
from Tor import TorManager
from Content import ContentDb
from util import RateLimit
from Db import Db
from Debug import Debug
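
# Debug.Notify is ZeroNet's graceful stop/notify exception; listing it in the hub's
# NOT_ERROR tuple keeps gevent from printing a traceback when a greenlet exits with it.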
gevent.get_hub().NOT_ERROR += (Debug.Notify,)


def cleanup():
    Db.dbCloseAll()
    for dir_path in [config.data_dir, config.data_dir + "-temp"]:
        if os.path.isdir(dir_path):
            for file_name in os.listdir(dir_path):
                ext = file_name.rsplit(".", 1)[-1]
                if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]:
                    continue
                file_path = dir_path + "/" + file_name
                if os.path.isfile(file_path):
                    os.unlink(file_path)


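# Register cleanup via the saved original atexit.register (atexit.register itself is stubbed out above)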
atexit_register(cleanup)
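

# Rewrite sites.json / filters.json / users.json in the test data dir to a known single-user state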
@pytest.fixture(scope="session")
def resetSettings(request):
    open("%s/sites.json" % config.data_dir, "w").write("{}")
    open("%s/filters.json" % config.data_dir, "w").write("{}")
    open("%s/users.json" % config.data_dir, "w").write("""
        {
            "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
                "certs": {},
                "master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
                "sites": {}
            }
        }
    """)


@pytest.fixture(scope="session")
def resetTempSettings(request):
    data_dir_temp = config.data_dir + "-temp"
    if not os.path.isdir(data_dir_temp):
        os.mkdir(data_dir_temp)
    open("%s/sites.json" % data_dir_temp, "w").write("{}")
    open("%s/filters.json" % data_dir_temp, "w").write("{}")
    open("%s/users.json" % data_dir_temp, "w").write("""
        {
            "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
                "certs": {},
                "master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
                "sites": {}
            }
        }
    """)

    def cleanup():
        os.unlink("%s/sites.json" % data_dir_temp)
        os.unlink("%s/users.json" % data_dir_temp)
        os.unlink("%s/filters.json" % data_dir_temp)
    request.addfinalizer(cleanup)
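

# Fresh copy of the bundled test site (1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT): the working
# directory is rebuilt from its "-original" snapshot, announce() is mocked out, and all
# greenlets and content.db handles created by the test are torn down afterwards.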
@pytest.fixture()
def site(request):
    threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
    # Reset ratelimit
    RateLimit.queue_db = {}
    RateLimit.called_db = {}

    site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")

    # Always use original data
    assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("")  # Make sure we don't delete everything
    shutil.rmtree(site.storage.getPath(""), True)
    shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath(""))

    # Add to site manager
    SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")

    site.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

    def cleanup():
        site.delete()
        site.content_manager.contents.db.close("Test cleanup")
        site.content_manager.contents.db.timer_check_optional.kill()
        SiteManager.site_manager.sites.clear()
        db_path = "%s/content.db" % config.data_dir
        os.unlink(db_path)
        del ContentDb.content_dbs[db_path]
        gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
    request.addfinalizer(cleanup)

    site.greenlet_manager.stopGreenlets()
    site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")  # Create a new Site object to load content.json files
    if not SiteManager.site_manager.sites:
        SiteManager.site_manager.sites = {}
    SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
    site.settings["serving"] = True
    return site
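

# Second Site instance backed by the "-temp" data dir; tests typically use it as the
# downloading peer while the "site" fixture serves the original content.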
@pytest.fixture()
def site_temp(request):
    threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
    with mock.patch("Config.config.data_dir", config.data_dir + "-temp"):
        site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        site_temp.settings["serving"] = True
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        def cleanup():
            site_temp.delete()
            site_temp.content_manager.contents.db.close("Test cleanup")
            site_temp.content_manager.contents.db.timer_check_optional.kill()
            db_path = "%s-temp/content.db" % config.data_dir
            os.unlink(db_path)
            del ContentDb.content_dbs[db_path]
            gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
        request.addfinalizer(cleanup)

    site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short)
    return site_temp
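

# Session-wide master user from users.json (created if missing); its per-site data is reset once per session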
@pytest.fixture(scope="session")
def user():
    user = UserManager.user_manager.get()
    if not user:
        user = UserManager.user_manager.create()
    user.sites = {}  # Reset user data
    return user


@pytest.fixture(scope="session")
def browser(request):
    try:
        from selenium import webdriver
        print("Starting chromedriver...")
        options = webdriver.chrome.options.Options()
        options.add_argument("--headless")
        options.add_argument("--window-size=1920x1080")
        options.add_argument("--log-level=1")
        browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, service_log_path=os.path.devnull, options=options)

        def quit():
            browser.quit()
        request.addfinalizer(quit)
    except Exception as err:
        raise pytest.skip("Test requires selenium + chromedriver: %s" % err)
    return browser


@pytest.fixture(scope="session")
def site_url():
    try:
        urllib.request.urlopen(SITE_URL).read()
    except Exception as err:
        raise pytest.skip("Test requires zeronet client running: %s" % err)
    return SITE_URL
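

# Any test that requests file_server runs twice: once bound to 127.0.0.1 (IPv4) and once to ::1 (IPv6)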
@pytest.fixture(params=['ipv4', 'ipv6'])
def file_server(request):
    if request.param == "ipv4":
        return request.getfixturevalue("file_server4")
    else:
        return request.getfixturevalue("file_server6")


@pytest.fixture
def file_server4(request):
    time.sleep(0.1)
    file_server = FileServer("127.0.0.1", 1544)
    file_server.ip_external = "1.2.3.4"  # Fake external ip

    def listen():
        ConnectionServer.start(file_server)
        ConnectionServer.listen(file_server)

    gevent.spawn(listen)
    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server.getConnection("127.0.0.1", 1544)
            conn.close()
            break
        except Exception as err:
            print("FileServer4 startup error", Debug.formatException(err))
    assert file_server.running
    file_server.ip_incoming = {}  # Reset flood protection

    def stop():
        file_server.stop()
    request.addfinalizer(stop)
    return file_server


@pytest.fixture
def file_server6(request):
    time.sleep(0.1)
    file_server6 = FileServer("::1", 1544)
    file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c'  # Fake external ip

    def listen():
        ConnectionServer.start(file_server6)
        ConnectionServer.listen(file_server6)

    gevent.spawn(listen)
    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server6.getConnection("::1", 1544)
            conn.close()
            break
        except Exception as err:
            print("FileServer6 startup error", Debug.formatException(err))
    assert file_server6.running
    file_server6.ip_incoming = {}  # Reset flood protection

    def stop():
        file_server6.stop()
    request.addfinalizer(stop)
    return file_server6
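

# UiWebsocket wired to an in-memory WsMock so websocket API commands can be driven
# synchronously from tests, e.g. (command name illustrative):
#   result = ui_websocket.testAction("siteInfo")
#   assert result["address"] == site.address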
@pytest.fixture()
def ui_websocket(site, user):
    class WsMock:
        def __init__(self):
            self.result = gevent.event.AsyncResult()

        def send(self, data):
            logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack()))
            self.result.set(json.loads(data)["result"])

        def getResult(self):
            logging.debug("WsMock: Get result")
            back = self.result.get()
            logging.debug("WsMock: Got result (data: %s)" % back)
            self.result = gevent.event.AsyncResult()
            return back

    ws_mock = WsMock()
    ui_websocket = UiWebsocket(ws_mock, site, None, user, None)

    def testAction(action, *args, **kwargs):
        ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs})
        return ui_websocket.ws.getResult()

    ui_websocket.testAction = testAction
    return ui_websocket
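

# Needs a local Tor daemon with ControlPort enabled; dependent tests are skipped otherwise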
@pytest.fixture(scope="session")
def tor_manager():
    try:
        tor_manager = TorManager(fileserver_port=1544)
        tor_manager.start()
        assert tor_manager.conn is not None
        tor_manager.startOnions()
    except Exception as err:
        raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err))
    return tor_manager
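

# Scratch zeronet.db built from a minimal two-table schema mapped to data.json; removed when the test finishes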
@pytest.fixture()
def db(request):
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": "%s/zeronet.db" % config.data_dir,
        "maps": {
            "data.json": {
                "to_table": [
                    "test",
                    {"node": "test", "table": "test_importfilter", "import_cols": ["test_id", "title"]}
                ]
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                    ["json_id", "INTEGER REFERENCES json (json_id)"]
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            },
            "test_importfilter": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                    ["json_id", "INTEGER REFERENCES json (json_id)"]
                ],
                "indexes": ["CREATE UNIQUE INDEX test_importfilter_id ON test_importfilter(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }

    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db.Db(schema, db_path)
    db.checkTables()

    def stop():
        db.close("Test db cleanup")
        os.unlink(db_path)
    request.addfinalizer(stop)
    return db
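

# Parametrized over the supported signature-verification backends (sslcrypto, sslcrypto_fallback, libsecp256k1)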
@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"])
def crypt_bitcoin_lib(request, monkeypatch):
    monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param)
    CryptBitcoin.loadLib(request.param)
    return CryptBitcoin


@pytest.fixture(scope='function', autouse=True)
def logCaseStart(request):
    global time_start
    time_start = time.time()
    logging.debug("---- Start test case: %s ----" % request._pyfuncitem)
    yield None  # Wait until the test case is done


# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file)
def workaroundPytestLogError():
    import _pytest.capture
    write_original = _pytest.capture.EncodedFile.write

    def write_patched(obj, *args, **kwargs):
        try:
            write_original(obj, *args, **kwargs)
        except ValueError as err:
            if str(err) == "I/O operation on closed file":
                pass
            else:
                raise err

    def flush_patched(obj, *args, **kwargs):
        try:
            obj.buffer.flush(*args, **kwargs)
        except ValueError as err:
            if str(err).startswith("I/O operation on closed file"):
                pass
            else:
                raise err

    _pytest.capture.EncodedFile.write = write_patched
    _pytest.capture.EncodedFile.flush = flush_patched


workaroundPytestLogError()


@pytest.fixture(scope='session', autouse=True)
def disableLog():
    yield None  # Wait until all tests are done
    logging.getLogger('').setLevel(logging.getLevelName(logging.CRITICAL))