# ZeroNet — src/Test/conftest.py
# Pytest configuration and shared fixtures for the ZeroNet test suite.
# Stdlib imports
import os
import sys
import urllib.request
import time
import logging
import json
import shutil
import gc
import datetime
import atexit

# Third-party imports
import pytest
import mock
import gevent
import gevent.event

# Patch the stdlib for cooperative (gevent) concurrency before any project
# module is imported; thread and subprocess are deliberately left unpatched.
from gevent import monkey
monkey.patch_all(thread=False, subprocess=False)
def pytest_addoption(parser):
    """Register the --slow command line flag to also run slow-marked tests."""
    parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests")
def pytest_collection_modifyitems(config, items):
    """Skip tests carrying the "slow" marker unless --slow was given on the cli."""
    if config.getoption("--slow"):
        # --runslow given in cli: do not skip slow tests
        return
    slow_marker = pytest.mark.skip(reason="need --slow option to run")
    for collected_item in items:
        if "slow" in collected_item.keywords:
            collected_item.add_marker(slow_marker)
# Config
if sys.platform == "win32":
    CHROMEDRIVER_PATH = "tools/chrome/chromedriver.exe"
else:
    CHROMEDRIVER_PATH = "chromedriver"
SITE_URL = "http://127.0.0.1:43110"
TEST_DATA_PATH = 'src/Test/testdata'

# Make bundled libs and the src dir importable before loading project modules.
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../lib"))  # External modules directory
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/.."))  # Imports relative to src dir

from Config import config
config.argv = ["none"]  # Dont pass any argv to config parser
config.parse(silent=True, parse_config=False)  # Plugins need to access the configuration
config.action = "test"

logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
# Set custom formatter with relative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log)
class TimeFilter(logging.Filter):
    """Logging filter that annotates each record with the time elapsed since the previous record (record.relative, seconds with ms precision)."""

    def filter(self, record):
        try:
            last = self.last
        except AttributeError:
            # First record seen: elapsed time is 0
            last = record.relativeCreated
        delta = datetime.datetime.fromtimestamp(record.relativeCreated / 1000.0) - datetime.datetime.fromtimestamp(last / 1000.0)
        # Bug fix: use total_seconds() instead of delta.seconds + delta.microseconds,
        # which silently dropped the days component for deltas longer than one day.
        record.relative = '{0:.3f}'.format(delta.total_seconds())
        self.last = record.relativeCreated
        return True
log = logging.getLogger()
fmt = logging.Formatter(fmt='+%(relative)ss %(levelname)-8s %(name)s %(message)s')
# Attach the relative-time filter and formatter to every configured handler.
# (Plain for loop: list comprehensions should not be used for side effects.)
for hndl in log.handlers:
    hndl.addFilter(TimeFilter())
    hndl.setFormatter(fmt)
# Load plugins
from Plugin import PluginManager

config.data_dir = TEST_DATA_PATH  # Use test data for unittests
os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../.."))  # Set working dir

all_loaded = PluginManager.plugin_manager.loadPlugins()
assert all_loaded, "There was error loading plugins"

config.loadPlugins()
config.parse(parse_config=False)  # Parse again to add plugin configuration options

config.action = "test"
config.debug_socket = True  # Use test data for unittests
config.verbose = True  # Use test data for unittests
config.tor = "disable"  # Don't start Tor client
config.trackers = []
config.data_dir = TEST_DATA_PATH  # Use test data for unittests
config.initLogging()
# Project imports (must come after the sys.path setup and plugin loading above).
from Site.Site import Site
from Site import SiteManager
from User import UserManager
from File import FileServer
from Connection import ConnectionServer
from Crypt import CryptConnection
from Crypt import CryptBitcoin
from Ui import UiWebsocket
from Tor import TorManager
from Content import ContentDb
from util import RateLimit
from Db import Db
from Debug import Debug
def cleanup():
    """atexit hook: close all open databases and delete leftover test artifacts from the data dirs."""
    Db.dbCloseAll()
    for dir_path in [config.data_dir, config.data_dir + "-temp"]:
        # Robustness fix: the -temp dir may never have been created, in which
        # case os.listdir would raise at interpreter exit.
        if not os.path.isdir(dir_path):
            continue
        for file_name in os.listdir(dir_path):
            ext = file_name.rsplit(".", 1)[-1]
            # Only remove file types produced by tests
            if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]:
                continue
            file_path = dir_path + "/" + file_name
            if os.path.isfile(file_path):
                os.unlink(file_path)

atexit.register(cleanup)
@pytest.fixture(scope="session")
def resetSettings(request):
    """Reset sites.json / filters.json / users.json in the test data dir to a known state."""
    # Use context managers so the handles are closed deterministically
    # (the originals relied on refcounting to close the files).
    with open("%s/sites.json" % config.data_dir, "w") as f:
        f.write("{}")
    with open("%s/filters.json" % config.data_dir, "w") as f:
        f.write("{}")
    with open("%s/users.json" % config.data_dir, "w") as f:
        f.write("""
{
    "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
        "certs": {},
        "master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
        "sites": {}
    }
}
""")
@pytest.fixture(scope="session")
def resetTempSettings(request):
    """Create the temporary data dir (data_dir + "-temp") with known json files; remove them on teardown."""
    data_dir_temp = config.data_dir + "-temp"
    if not os.path.isdir(data_dir_temp):
        os.mkdir(data_dir_temp)
    # Context managers close the handles deterministically
    with open("%s/sites.json" % data_dir_temp, "w") as f:
        f.write("{}")
    with open("%s/filters.json" % data_dir_temp, "w") as f:
        f.write("{}")
    with open("%s/users.json" % data_dir_temp, "w") as f:
        f.write("""
{
    "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": {
        "certs": {},
        "master_seed": "024bceac1105483d66585d8a60eaf20aa8c3254b0f266e0d626ddb6114e2949a",
        "sites": {}
    }
}
""")

    def cleanup():
        os.unlink("%s/sites.json" % data_dir_temp)
        os.unlink("%s/users.json" % data_dir_temp)
        os.unlink("%s/filters.json" % data_dir_temp)
    request.addfinalizer(cleanup)
@pytest.fixture()
def site(request):
    """Provide the standard test site, restored from the pristine "-original" data copy."""
    threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
    # Reset ratelimit
    RateLimit.queue_db = {}
    RateLimit.called_db = {}

    site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")

    # Always use original data
    assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("")  # Make sure we dont delete everything
    shutil.rmtree(site.storage.getPath(""), True)
    shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath(""))

    # Add to site manager
    SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")

    # NOTE(review): this mock is applied to the first Site instance, but the
    # fixture rebinds `site` to a fresh instance below — confirm intended.
    site.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

    def cleanup():
        # Delete site files and db rows, then kill any greenlets the test spawned
        site.storage.deleteFiles()
        site.content_manager.contents.db.deleteSite(site)
        del SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"]
        site.content_manager.contents.db.close()
        SiteManager.site_manager.sites.clear()
        db_path = "%s/content.db" % config.data_dir
        os.unlink(db_path)
        del ContentDb.content_dbs[db_path]
        gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
    request.addfinalizer(cleanup)

    site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")  # Create new Site object to load content.json files
    if not SiteManager.site_manager.sites:
        SiteManager.site_manager.sites = {}
    SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site
    return site
@pytest.fixture()
def site_temp(request):
    """Provide a second, independent instance of the test site living in the "-temp" data dir."""
    threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)]
    # Temporarily point config.data_dir at the temp dir while the Site loads its storage
    with mock.patch("Config.config.data_dir", config.data_dir + "-temp"):
        site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
    site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

    def cleanup():
        # Delete site files and db rows, then kill any greenlets the test spawned
        site_temp.storage.deleteFiles()
        site_temp.content_manager.contents.db.deleteSite(site_temp)
        site_temp.content_manager.contents.db.close()
        db_path = "%s-temp/content.db" % config.data_dir
        os.unlink(db_path)
        del ContentDb.content_dbs[db_path]
        gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before])
    request.addfinalizer(cleanup)
    return site_temp
@pytest.fixture(scope="session")
def user():
    """Session-wide test user: created on first use, site data reset on every request."""
    user = UserManager.user_manager.get()
    if not user:
        user = UserManager.user_manager.create()
    user.sites = {}  # Reset user data
    return user
@pytest.fixture(scope="session")
def browser(request):
    """Headless Chrome driven by selenium; skips the test if selenium/chromedriver is unavailable."""
    try:
        from selenium import webdriver
        print("Starting chromedriver...")
        options = webdriver.chrome.options.Options()
        options.add_argument("--headless")
        options.add_argument("--window-size=1920x1080")
        options.add_argument("--log-level=1")
        browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, service_log_path=os.path.devnull, options=options)

        def quit():
            browser.quit()
        request.addfinalizer(quit)
    except Exception as err:
        # Any failure (missing selenium, missing chromedriver binary) skips the test
        raise pytest.skip("Test requires selenium + chromedriver: %s" % err)
    return browser
@pytest.fixture(scope="session")
def site_url():
    """URL of a locally running ZeroNet client; skips the test when it is not reachable."""
    try:
        urllib.request.urlopen(SITE_URL).read()
    except Exception as err:
        raise pytest.skip("Test requires zeronet client running: %s" % err)
    return SITE_URL
@pytest.fixture(params=['ipv4', 'ipv6'])
def file_server(request):
    """Parametrized fixture: run every dependent test over both IPv4 and IPv6 file servers."""
    if request.param == "ipv4":
        return request.getfixturevalue("file_server4")
    else:
        return request.getfixturevalue("file_server6")
@pytest.fixture
def file_server4(request):
    """FileServer listening on 127.0.0.1:1544 with a fake external ip; stopped on teardown."""
    time.sleep(0.1)
    file_server = FileServer("127.0.0.1", 1544)
    file_server.ip_external = "1.2.3.4"  # Fake external ip

    def listen():
        ConnectionServer.start(file_server)
        ConnectionServer.listen(file_server)

    gevent.spawn(listen)
    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server.getConnection("127.0.0.1", 1544)
            conn.close()
            break
        except Exception as err:
            # Bug fix: this message previously said "FileServer6"
            print("FileServer4 startup error", Debug.formatException(err))
    assert file_server.running
    file_server.ip_incoming = {}  # Reset flood protection

    def stop():
        file_server.stop()
    request.addfinalizer(stop)
    return file_server
@pytest.fixture
def file_server6(request):
    """FileServer listening on [::1]:1544 with a fake external ip; stopped on teardown."""
    time.sleep(0.1)
    file_server6 = FileServer("::1", 1544)
    file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c'  # Fake external ip

    def listen():
        ConnectionServer.start(file_server6)
        ConnectionServer.listen(file_server6)

    gevent.spawn(listen)
    # Wait for port opening
    for retry in range(10):
        time.sleep(0.1)  # Port opening
        try:
            conn = file_server6.getConnection("::1", 1544)
            conn.close()
            break
        except Exception as err:
            print("FileServer6 startup error", Debug.formatException(err))
    assert file_server6.running
    file_server6.ip_incoming = {}  # Reset flood protection

    def stop():
        file_server6.stop()
    request.addfinalizer(stop)
    return file_server6
@pytest.fixture()
def ui_websocket(site, user):
    """UiWebsocket wired to an in-memory mock websocket; adds a testAction() helper that calls an actionXxx method and returns its result."""
    class WsMock:
        def __init__(self):
            self.result = gevent.event.AsyncResult()

        def send(self, data):
            # Store the command result so the caller can wait for it
            self.result.set(json.loads(data)["result"])

        def getResult(self):
            back = self.result.get()
            self.result = gevent.event.AsyncResult()  # Re-arm for the next command
            return back

    ws_mock = WsMock()
    ui_websocket = UiWebsocket(ws_mock, site, None, user, None)

    def testAction(action, *args, **kwargs):
        # Dispatch to action<Name> with request id 0 and block for the result
        func = getattr(ui_websocket, "action%s" % action)
        func(0, *args, **kwargs)
        return ui_websocket.ws.result.get()

    ui_websocket.testAction = testAction
    return ui_websocket
@pytest.fixture(scope="session")
def tor_manager():
    """TorManager connected to a local Tor ControlPort; skips the test when Tor is unavailable."""
    try:
        tor_manager = TorManager(fileserver_port=1544)
        tor_manager.start()
        assert tor_manager.conn is not None
        tor_manager.startOnions()
    except Exception as err:
        raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err))
    return tor_manager
@pytest.fixture()
def db(request):
    """Fresh TestDb sqlite database built from a fixed schema; deleted on teardown."""
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": db_path,  # Reuse db_path instead of duplicating the literal
        "maps": {
            "data.json": {
                "to_table": [
                    "test",
                    {"node": "test", "table": "test_importfilter", "import_cols": ["test_id", "title"]}
                ]
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                    ["json_id", "INTEGER REFERENCES json (json_id)"]
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            },
            "test_importfilter": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                    ["json_id", "INTEGER REFERENCES json (json_id)"]
                ],
                "indexes": ["CREATE UNIQUE INDEX test_importfilter_id ON test_importfilter(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }

    # Always start from an empty database file
    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db.Db(schema, db_path)
    db.checkTables()

    def stop():
        db.close()
        os.unlink(db_path)
    request.addfinalizer(stop)
    return db
@pytest.fixture(params=["btctools", "openssl", "libsecp256k1"])
def crypt_bitcoin_lib(request, monkeypatch):
    """Run the dependent test once per supported signature-verification backend."""
    monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param)
    CryptBitcoin.loadLib(request.param)
    return CryptBitcoin