Jason Rhinelander 2023-05-29 12:00:08 +10:00 committed by GitHub
commit e656e4f274
33 changed files with 2142 additions and 345 deletions

View File

@ -30,6 +30,7 @@
// Parts of this file are originally copyright (c) 2012-2013 The Cryptonote developers
#include <fstream>
#include <type_traits>
#include "account.h"
#include "epee/warnings.h"
@ -197,19 +198,19 @@ DISABLE_VS_WARNINGS(4244 4345)
}
//-----------------------------------------------------------------
void account_base::create_from_device(const std::string &device_name)
void account_base::create_from_device(const std::string &device_name, bool debug_reset)
{
hw::device &hwdev = hw::get_device(device_name);
hwdev.set_name(device_name);
create_from_device(hwdev);
create_from_device(hwdev, debug_reset);
}
void account_base::create_from_device(hw::device &hwdev)
void account_base::create_from_device(hw::device &hwdev, bool debug_reset)
{
m_keys.set_device(hwdev);
MCDEBUG("device", "device type: " << tools::type_name(typeid(hwdev)));
CHECK_AND_ASSERT_THROW_MES(hwdev.init(), "Device init failed");
CHECK_AND_ASSERT_THROW_MES(hwdev.connect(), "Device connect failed");
CHECK_AND_ASSERT_THROW_MES(hwdev.connect(debug_reset), "Device connect failed");
try {
CHECK_AND_ASSERT_THROW_MES(hwdev.get_public_address(m_keys.m_account_address), "Cannot get a device address");
CHECK_AND_ASSERT_THROW_MES(hwdev.get_secret_keys(m_keys.m_view_secret_key, m_keys.m_spend_secret_key), "Cannot get device secret");

View File

@ -75,8 +75,8 @@ namespace cryptonote
public:
account_base();
crypto::secret_key generate(const crypto::secret_key& recovery_key = crypto::secret_key(), bool recover = false, bool two_random = false);
void create_from_device(const std::string &device_name);
void create_from_device(hw::device &hwdev);
void create_from_device(const std::string &device_name, bool debug_reset = false);
void create_from_device(hw::device &hwdev, bool debug_reset = false);
void create_from_keys(const cryptonote::account_public_address& address, const crypto::secret_key& spendkey, const crypto::secret_key& viewkey);
void create_from_viewkey(const cryptonote::account_public_address& address, const crypto::secret_key& viewkey);
bool make_multisig(const crypto::secret_key &view_secret_key, const crypto::secret_key &spend_secret_key, const crypto::public_key &spend_public_key, const std::vector<crypto::secret_key> &multisig_keys);

View File

@ -126,7 +126,7 @@ namespace hw {
virtual bool init() = 0;
virtual bool release() = 0;
virtual bool connect() = 0;
virtual bool connect(bool debug_reset_network = false) = 0;
virtual bool disconnect() = 0;
virtual bool set_mode(mode m) { mode_ = m; return true; }

View File

@ -73,7 +73,7 @@ namespace hw::core {
return true;
}
bool device_default::connect() {
bool device_default::connect(bool) {
return true;
}
bool device_default::disconnect() {

View File

@ -52,7 +52,7 @@ namespace hw {
bool init() override;
bool release() override;
bool connect() override;
bool connect(bool ignored) override;
bool disconnect() override;
type get_type() const override { return type::SOFTWARE; };

View File

@ -257,6 +257,7 @@ namespace hw::ledger {
LEDGER_INS(RESET, 0x02);
LEDGER_INS(GET_NETWORK, 0x10);
LEDGER_INS(RESET_NETWORK, 0x11);
LEDGER_INS(GET_KEY, 0x20);
LEDGER_INS(DISPLAY_ADDRESS, 0x21);
@ -600,7 +601,7 @@ namespace hw::ledger {
{0x2c97, 0x501c, 0, 0xffa0}, {0x2c97, 0x501d, 0, 0xffa0}, {0x2c97, 0x501e, 0, 0xffa0}, {0x2c97, 0x501f, 0, 0xffa0},
};
bool device_ledger::connect() {
bool device_ledger::connect(bool debug_reset) {
disconnect();
if (auto* hid_io = dynamic_cast<io::hid*>(hw_device.get()))
hid_io->connect(known_devices);
@ -610,6 +611,11 @@ namespace hw::ledger {
throw std::logic_error{"Invalid ledger hardware configure"};
reset();
if (debug_reset) {
auto locks = tools::unique_locks(device_locker, command_locker);
send_simple(INS_RESET_NETWORK, static_cast<uint8_t>(nettype));
}
check_network_type();
#ifdef DEBUG_HWDEVICE

View File

@ -232,7 +232,7 @@ namespace hw::ledger {
std::string get_name() const override;
bool init() override;
bool release() override;
bool connect() override;
bool connect(bool debug_reset_network = false) override;
bool disconnect() override;
bool connected() const;

View File

@ -130,6 +130,7 @@ namespace
const auto arg_wallet_file = wallet_args::arg_wallet_file();
const command_line::arg_descriptor<std::string> arg_generate_new_wallet = {"generate-new-wallet", sw::tr("Generate new wallet and save it to <arg>"), ""};
const command_line::arg_descriptor<std::string> arg_generate_from_device = {"generate-from-device", sw::tr("Generate new wallet from device and save it to <arg>"), ""};
const command_line::arg_descriptor<bool> arg_debug_reset_device = {"debug-reset-device", sw::tr("Reset the hardware device when generating the wallet (requires a debugging hardware wallet)"), false};
const command_line::arg_descriptor<std::string> arg_generate_from_view_key = {"generate-from-view-key", sw::tr("Generate incoming-only wallet from view key"), ""};
const command_line::arg_descriptor<std::string> arg_generate_from_spend_key = {"generate-from-spend-key", sw::tr("Generate deterministic wallet from spend key"), ""};
const command_line::arg_descriptor<std::string> arg_generate_from_keys = {"generate-from-keys", sw::tr("Generate wallet from private keys"), ""};
@ -4058,6 +4059,7 @@ bool simple_wallet::handle_command_line(const boost::program_options::variables_
m_do_not_relay = command_line::get_arg(vm, arg_do_not_relay);
m_subaddress_lookahead = command_line::get_arg(vm, arg_subaddress_lookahead);
m_use_english_language_names = command_line::get_arg(vm, arg_use_english_language_names);
m_debug_reset_device = command_line::get_arg(vm, arg_debug_reset_device);
m_restoring = !m_generate_from_view_key.empty() ||
!m_generate_from_spend_key.empty() ||
!m_generate_from_keys.empty() ||
@ -4353,7 +4355,7 @@ std::optional<epee::wipeable_string> simple_wallet::new_device_wallet(const boos
"spend key (needed to spend funds) does not leave the device.");
m_wallet->restore_from_device(
m_wallet_file, std::move(rc.second).password(), device_desc.empty() ? "Ledger" : device_desc, create_address_file,
std::move(create_hwdev_txt), [](const std::string& msg) { message_writer(epee::console_color_green, true) << msg; });
std::move(create_hwdev_txt), m_debug_reset_device, [](const std::string& msg) { message_writer(epee::console_color_green, true) << msg; });
message_writer(epee::console_color_white, true) << tr("Finished setting up wallet from hw device");
}
catch (const std::exception& e)

View File

@ -434,6 +434,7 @@ namespace cryptonote
bool m_do_not_relay;
bool m_use_english_language_names;
bool m_has_locked_key_images;
bool m_debug_reset_device;
epee::console_handlers_binder m_cmd_binder;

View File

@ -268,6 +268,8 @@ struct options {
get_default_ringdb_path(),
{{ &testnet, &devnet, &regtest }},
[](std::array<bool, 3> test_dev_fake, bool defaulted, std::string val)->std::string {
if (val.empty())
return val;
if (test_dev_fake[0])
return (fs::u8path(val) / "testnet").u8string();
else if (test_dev_fake[1])
@ -4931,7 +4933,7 @@ void wallet2::generate(const fs::path& wallet_, const epee::wipeable_string& pas
}
void wallet2::restore_from_device(const fs::path& wallet_, const epee::wipeable_string& password, const std::string &device_name,
bool create_address_file, std::optional<std::string> hwdev_label, std::function<void(std::string msg)> progress_callback)
bool create_address_file, std::optional<std::string> hwdev_label, bool debug_reset_device, std::function<void(std::string msg)> progress_callback)
{
clear();
prepare_file_names(wallet_);
@ -4948,7 +4950,7 @@ void wallet2::restore_from_device(const fs::path& wallet_, const epee::wipeable_
hwdev.set_derivation_path(m_device_derivation_path);
hwdev.set_callback(get_device_callback());
m_account.create_from_device(hwdev);
m_account.create_from_device(hwdev, debug_reset_device);
init_type(m_account.get_device().get_type());
setup_keys(password);
if (progress_callback)

View File

@ -537,7 +537,8 @@ private:
* \param status_callback callback to invoke with progress messages to display to the user
*/
void restore_from_device(const fs::path& wallet_, const epee::wipeable_string& password, const std::string &device_name,
bool create_address_file = false, std::optional<std::string> hwdev_label = std::nullopt, std::function<void(std::string msg)> status_callback = {});
bool create_address_file = false, std::optional<std::string> hwdev_label = std::nullopt, bool debug_reset_device = false,
std::function<void(std::string msg)> status_callback = {});
/*!
* \brief Creates a multisig wallet

View File

@ -2413,6 +2413,14 @@ namespace {
if (!wal)
throw wallet_rpc_error{error_code::UNKNOWN_ERROR, "Failed to create wallet"};
if (req.subaddress_lookahead_major || req.subaddress_lookahead_minor)
{
if (!(req.subaddress_lookahead_major && req.subaddress_lookahead_minor))
throw wallet_rpc_error{error_code::UNKNOWN_ERROR, "Must specify subaddress lookahead major AND minor if specifying either"};
wal->set_subaddress_lookahead(*req.subaddress_lookahead_major, *req.subaddress_lookahead_minor);
}
if (!req.hardware_wallet)
wal->set_seed_language(req.language);
@ -2424,7 +2432,8 @@ namespace {
wal->set_refresh_from_block_height(hres.height);
if (req.hardware_wallet)
wal->restore_from_device(wallet_file, req.password, req.device_name.empty() ? "Ledger" : req.device_name);
wal->restore_from_device(wallet_file, req.password, req.device_name.empty() ? "Ledger" : req.device_name,
false, std::nullopt, req.debug_reset);
else
wal->generate(wallet_file, req.password);

View File

@ -883,6 +883,9 @@ KV_SERIALIZE_MAP_CODE_BEGIN(CREATE_WALLET::request)
KV_SERIALIZE(hardware_wallet)
KV_SERIALIZE(device_name)
KV_SERIALIZE(device_label)
KV_SERIALIZE(debug_reset)
KV_SERIALIZE(subaddress_lookahead_major)
KV_SERIALIZE(subaddress_lookahead_minor)
KV_SERIALIZE_MAP_CODE_END()

View File

@ -1700,6 +1700,9 @@ namespace tools::wallet_rpc {
bool hardware_wallet; // Create this wallet from a connected hardware wallet. (`language` will be ignored).
std::string device_name; // When `hardware` is true, this specifies the hardware wallet device type (currently supported: "Ledger"). If omitted "Ledger" is used.
std::optional<std::string> device_label; // Custom label to write to a `wallet.hwdev.txt`. Can be empty; omit the parameter entirely to not write a .hwdev.txt file at all.
bool debug_reset; // Can be specified as true to force a hardware wallet in DEBUG mode to reset (and switch networks, if necessary). Will fail if the hardware wallet is not compiled in debug mode.
std::optional<uint32_t> subaddress_lookahead_major; // how many "accounts" to compute subaddress keys for
std::optional<uint32_t> subaddress_lookahead_minor; // how many subaddresses per "account" to compute keys for
KV_MAP_SERIALIZABLE
};
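To illustrate the new fields, here is a minimal sketch of a `create_wallet` JSON-RPC call. The use of Python requests, the endpoint path/port, and the filename/password parameter names are assumptions based on the standard oxen-wallet-rpc JSON-RPC interface; the new field names come from the struct above.

# Hypothetical example: create a hardware wallet while forcing a DEBUG-mode device
# reset and setting the subaddress lookahead. Host/port are illustrative.
import requests

resp = requests.post(
    "http://127.0.0.1:18090/json_rpc",
    json={
        "jsonrpc": "2.0",
        "id": "0",
        "method": "create_wallet",
        "params": {
            "filename": "hw-wallet",        # assumed standard create_wallet field
            "password": "",                 # assumed standard create_wallet field
            "hardware_wallet": True,
            "device_name": "Ledger",
            "debug_reset": True,            # requires a DEBUG build of the device app
            "subaddress_lookahead_major": 5,
            "subaddress_lookahead_minor": 20,
        },
    },
)
print(resp.json())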

View File

@ -63,6 +63,7 @@ add_subdirectory(block_weight)
add_subdirectory(hash)
add_subdirectory(net_load_tests)
add_subdirectory(network_tests)
add_subdirectory(ledger)
if (ANDROID)
# Currently failed to compile
# add_subdirectory(libwallet_api_tests)

View File

@ -0,0 +1,11 @@
execute_process(COMMAND ${Python_EXECUTABLE} "-c" "import requests, pytest" OUTPUT_QUIET ERROR_QUIET RESULT_VARIABLE _python_import_result)
if(NOT _python_import_result)
message(STATUS "Ledger wallet tests enabled")
get_target_property(_bin_dir daemon RUNTIME_OUTPUT_DIRECTORY)
add_test(NAME ledger_tests
COMMAND ${Python_EXECUTABLE} -m pytest "${CMAKE_CURRENT_SOURCE_DIR}/" "--binary-dir=${_bin_dir}")
else()
message(WARNING "Ledger tests not enabled: Python 3 with requests & pytest required")
endif()
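Once registered this way, the suite can also be invoked through CTest from the build directory; an illustrative invocation (the ledger_tests name comes from the add_test call above, and the emulator and Python prerequisites described in the README below still apply):

ctest -R ledger_tests --output-on-failure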

tests/ledger/README.md Normal file (76 lines)
View File

@ -0,0 +1,76 @@
# Ledger hardware wallet test suite
This directory contains the Ledger hardware wallet test suite for testing the interactions of the
Oxen wallet (via oxen-wallet-rpc) with a Ledger device.
It works by booting up a new "fakechain" oxen network for each set of tests, mining a few blocks,
and setting up wallets that interact with each other to test various Ledger wallet functionality.
The test suite itself manages this fake network and its wallets; you do not need to do anything to
run the fake network.
## Requirements
1. Compiled oxend and oxen-wallet-rpc binaries. By default the test suite looks in ../../build/bin
but you can specify a different path by running the tests with the `--binary-dir=...` argument.
The build must include Ledger support, which
requires libhidapi-dev on the system; during cmake invocation there should be a line such as:
-- Using HIDAPI /usr/lib/x86_64-linux-gnu/libhidapi-libusb.so (includes at /usr/include/hidapi)
If it instead gives a message about HIDAPI not found then you will need to install the headers
and rebuild.
2. Running the test code on the client side requires Python 3.8 (or higher) with
[pytest](https://pytest.org) and the `requests` modules installed.
3. A debug build of the [Oxen Ledger hardware wallet app](https://github.com/LedgerHQ/app-oxen). As
per Ledger requirements, this is built inside a docker container, using `BOLOS_SDK=$NANOS_SDK
make DEBUG=1` from the app directory (changing the device SDK as needed for the device type to be
tested).
4. A working [Speculos device emulator](https://github.com/LedgerHQ/speculos) to emulate the
hardware wallet and run the wallet code.
## Running the tests
### Starting the emulator
Start the speculos emulator using:
python3 /path/to/speculos/speculos.py /path/to/bin/app.elf -m nanos
for a Nano S emulator; change `nanos` to `nanox` to emulate the Nano X.
`app.elf` here is the app built in the app-oxen repository.
Once the tests start running you should see an emulated Ledger screen appear with a testnet wallet
(starting with `T`). If it comes up with a mainnet Oxen wallet (starting with `L`) then you are not
running a debug build and should rebuild the device application.
Leave speculos running for the duration of the tests.
### Pytest
With the emulator running, invoke `pytest` (or `python3 -mpytest` if a pytest binary is not
installed) from the tests/ledger directory of the oxen-core project. You should start to see it
running the tests, and should see activity in speculos (both in its terminal and on the screen).
Running the full test suite takes about 3-5 minutes.
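A typical invocation with illustrative paths, combining the options covered in the Requirements section above and the advanced options below:

python3 -m pytest -vv --binary-dir=../../build/bin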
#### Advanced testing output
- If you want more verbosity as the tests run add `-vv` to the pytest invocation.
- To run a specific test use `-k test_whatever` to run just tests matching `test_whatever`. For
example, `-k test_transfers.py` will run just the transfer tests, and `-k test_sn_stake` will run
just the SN staking test. `pytest --collect-only` will list all available tests.
- For extremely verbose output use `-vv -s`; this increases verbosity *and* adds various test suite
debugging statements as the tests run.
- Each test creates temporary directories for its oxend and oxen-wallet-rpc instances; if you run
with `-vv -s` the debug output will include the path where these are created (typically
/tmp/pytest-of-$USERNAME/pytest-current symlinks to the latest run). You can drill into these
directories to inspect oxend or oxen-wallet-rpc logs when diagnosing test issues.

tests/ledger/conftest.py Normal file (91 lines)
View File

@ -0,0 +1,91 @@
#!/usr/bin/python3
import pytest
import os.path
import service_node_network
from ledgerapi import LedgerAPI
from daemons import Wallet
def pytest_addoption(parser):
parser.addoption("--binary-dir", default="../../build/bin", action="store")
parser.addoption("--ledger-apdu", default="127.0.0.1:9999", action="store")
parser.addoption("--ledger-api", default="http://127.0.0.1:5000", action="store")
def pytest_collection_modifyitems(session, config, items):
"""Reorders the tests more logically than the default alphabetical order"""
pos = {"test_basic.py": 1, "test_transfers.py": 2, "test_sn.py": 3, "test_ons.py": 4, "": 5}
items.sort(key=lambda i: pos.get(i.parent.name, pos[""]))
@pytest.fixture(scope="session")
def binary_dir(request):
binpath = request.config.getoption("--binary-dir")
for exe in ("oxend", "oxen-wallet-rpc"):
b = f"{binpath}/{exe}"
if not os.path.exists(b):
raise FileNotFoundError(
b,
f"Required executable ({b}) not found; build the project, or specify an alternate build/bin dir with --binary-dir",
)
return binpath
@pytest.fixture(scope="session")
def ledger(request):
l = LedgerAPI(request.config.getoption("--ledger-api"))
if l.buggy_S:
import warnings
warnings.warn("Detected Speculos buggy 'S' handling (issue #204); applying workarounds")
return l
@pytest.fixture
def net(pytestconfig, tmp_path, binary_dir):
return service_node_network.basic_net(pytestconfig, tmp_path, binary_dir)
@pytest.fixture
def hal(net, request):
"""
`hal` is a Ledger hardware-backed wallet.
"""
hal = Wallet(
node=net.nodes[0],
name="HAL",
rpc_wallet=net.binpath + "/oxen-wallet-rpc",
datadir=net.datadir,
ledger_api=request.config.getoption("--ledger-api"),
ledger_apdu=request.config.getoption("--ledger-apdu"),
)
hal.ready(wallet="HAL")
return hal
@pytest.fixture
def mike(net):
return net.mike
@pytest.fixture
def alice(net):
return net.alice
@pytest.fixture
def bob(net):
return net.bob
# Gives you an (unstaked) sn
@pytest.fixture
def sn(net):
return net.unstaked_sns[0]

tests/ledger/daemons.py Symbolic link (1 line)
View File

@ -0,0 +1 @@
../network_tests/daemons.py

tests/ledger/expected.py Normal file (211 lines)
View File

@ -0,0 +1,211 @@
import re
import time
from concurrent.futures import ThreadPoolExecutor
from vprint import vprint
executor = ThreadPoolExecutor(max_workers=1)
class MatchScreen:
"""
Provides a call operator that matches each device line against a regex for the line. Can
optionally invoke callback when all the regexes match, e.g. to perform additional checks or
extract data. Note that regexes use .search, so should be anchored with ^ as needed.
If allow_extra is given and True then the `ledger.curr()` is permitted to return results longer
than the regex list; only the first `len(regexes)` elements are tested.
If fail_index is given it should be an index >= 1 from which mismatches should be considered
fatal: if the items before `fail_index` match the screen, then the ones from `fail_index`
onwards *must* match or else we fatally fail with an exception. This can be used, for example,
to match something like `['Confirm Amount', '123']`: using fail_index=1 we would immediately
fail (with exception) the test if we see `Confirm Amount` on the screen with any other value.
(Without fail_index, we would keep re-testing the screen in such a case).
If callback is specified and returns a value then that value is cast to bool and returned. If
callback is not specified, returns nothing, or returns None, then True is returned after calling the callback.
callback, if given, will be invoked as `callback(curr_text, match_objects)` and can:
- return a truthy value, None, or no return value to pass the interaction/match and proceed to
the next interaction
- return a falsey value (other than None) to fail the match and repeat the interaction
- throw an exception to fail the test
"""
def __init__(self, regexes, callback=None, *, allow_extra=False, fail_index=None):
self.regexes = [re.compile(r) for r in regexes]
self.callback = callback
self.allow_extra = allow_extra
self.fail_index = fail_index or len(self.regexes)
self.desc = f"screen match: {regexes}"
def __call__(self, ledger, *, immediate=False):
text = ledger.curr()
extra = len(text) - len(self.regexes)
if (extra >= 0) if self.allow_extra else (extra == 0):
matches = []
for i in range(len(self.regexes)):
matches.append(self.regexes[i].search(text[i]))
if not matches[-1]:
if i >= self.fail_index or immediate:
vprint(f"fatal match fail: {text} against {self.desc}")
raise ValueError(f"wrong screen value: {text}, expected {self.desc}")
return False
if self.callback:
res = self.callback(text, matches)
if res is not None:
res = bool(res)
if immediate and not res:
raise ValueError(f"wrong screen value: {text}, expected {self.desc}")
return res
return True
if immediate:
raise ValueError(f"Wrong screen value: {text}")
return False
class ExactScreen(MatchScreen):
"""
Convenience wrapper around MatchScreen that provides a call operator returning True if we get an
exact match on the ledger device, False otherwise. `result` should be a list of strings (to match
the result of ledger.curr()); other arguments are forwarded to MatchScreen.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.made_buggy = False
def __call__(self, ledger, *args, **kwargs):
if not self.made_buggy:
self.made_buggy = True
# Work around Speculos bugs:
# ledger.buggy_S - can't read "S" off the Nano X screen:
# https://github.com/LedgerHQ/speculos/issues/204
if ledger.buggy_S:
for i in range(len(self.regexes)):
self.regexes[i] = re.compile(self.regexes[i].pattern.replace("S", "S?"))
return super().__call__(ledger, *args, **kwargs)
class MatchMulti:
"""
Matches a multi-valued value on the screen, expected to be displayed as `{title} 1/N` through
`{title} N/N` subscreens; once we match the first screen, we page through the rest,
concatenating the values. The final, concatenated value must match `value` (unless `value` is
None).
callback, if given, is invoked with the final, concatenated value. (This can be used, for
instance, with value=None to allow capturing the value). Unlike MatchScreen, the callback's
return value is ignored, but the callback can still throw or assert to cause a test failure.
"""
def __init__(self, title, value, callback=None):
self.title = title
self.expected = value
self.re = re.compile("^" + re.escape(title) + r" \(1/(\d+)\)$")
self.callback = callback
self.desc = f"multi-value {title}"
def __call__(self, ledger, immediate=False):
text = ledger.curr()
if len(text) < 2:
return False
m = self.re.search(text[0])
if not m:
return False
val = ledger.read_multi_value(self.title)
if self.expected is not None:
if val != self.expected:
if ledger.buggy_S and self.expected.replace("S", "") == val:
pass
else:
raise ValueError(
f"{self.title} value {val} did not match expected {self.expected}"
)
if self.callback:
self.callback(val)
return True
class Do:
"""Fake matcher that just does some side effect (passing the ledger) and always returns True"""
def __init__(self, action, desc=None):
self.action = action
if desc:
self.desc = desc
def __call__(self, ledger, immediate=False):
self.action(ledger)
return True
# Static Do objects that do a right/left/both push when invoked
Do.right = Do(lambda ledger: ledger.right(), desc="push right")
Do.left = Do(lambda ledger: ledger.left(), desc="push left")
Do.both = Do(lambda ledger: ledger.both(), desc="push both")
def run_with_interactions(ledger, main, *interactions, timeout=30, poll=0.25):
"""
Uses a thread to call `main` and the given interactions in parallel.
Each interaction is a callable that is passed the ledger instance and returns True if it
succeeded, False if it did not match. Upon a True return we move on to the next interaction and
call it repeatedly (with delay `poll`) until it returns True, etc.
If the timeout is reached, or the `main` command finishes, before all interactions pass then we
raise an exception.
In either case, we wait for `main` to finish and (if interactions passed) return its result or
exception; otherwise we raise an error for the interactions timeout.
"""
future = executor.submit(main)
timeout_at = time.time() + timeout
int_fail = None
try:
for f in interactions:
while time.time() < timeout_at and not future.done():
if f(ledger):
vprint(f"Interaction success: {f.desc if hasattr(f, 'desc') else f}")
break
time.sleep(poll)
else:
desc = getattr(f, "desc", "device interaction")
if time.time() < timeout_at:
raise EOFError(f"command finished before {desc} completed")
else:
raise TimeoutError(f"timeout waiting for {desc}")
except Exception as e:
int_fail = e
if int_fail is not None:
try:
future.result()
except Exception as e:
# Both raised, so throw containing both messages:
raise RuntimeError(
"Failed to run with interactions:\n"
f"Run failure: {e}\n"
f"Interactions failure: {int_fail}"
)
raise int_fail
return future.result()
def check_interactions(ledger, *interactions):
"""Sort of like run_with_interactions except without a separate task to run, and without
polling/timeouts: this expects all the given interacts to run and match immediately."""
for f in interactions:
f(ledger, immediate=True)

tests/ledger/ledgerapi.py Normal file (113 lines)
View File

@ -0,0 +1,113 @@
import requests
import urllib.parse
import time
import re
class SingleBaseSession(requests.Session):
def __init__(self, base_url):
super().__init__()
self.base_url = base_url
def request(self, method, url, *args, **kwargs):
return super().request(method, urllib.parse.urljoin(self.base_url, url), *args, **kwargs)
class LedgerAPI:
def __init__(self, api_url):
self.api = SingleBaseSession(api_url)
self._detect_buggy_crap()
def curr(self):
"""Returns the text of events on the current screen"""
return [e["text"] for e in self.api.get("/events?currentscreenonly=true").json()["events"]]
def _touch(self, which, count, action, delay, sleep):
json = {"action": action}
if delay:
json["delay"] = delay
for _ in range(count):
self.api.post(f"/button/{which}", json=json)
if sleep:
time.sleep(sleep)
def left(self, count=1, *, sleep=0, action="press-and-release", delay=None):
"""
Hit the left button `count` times; sleeps for `sleep` seconds after each push to wait for it
to register.
"""
self._touch("left", count, action, delay, sleep)
def right(self, count=1, *, sleep=0, action="press-and-release", delay=None):
"""
Hit the right button `count` times; sleeps for `sleep` seconds after each push to wait for
it to register.
"""
self._touch("right", count, action, delay, sleep)
def both(self, *, sleep=0, action="press-and-release", delay=None):
"""
Hit both buttons simultaneously; sleeps for `sleep` seconds after pushing to wait for it to
register.
"""
self._touch("both", 1, action, delay, sleep)
def read_multi_value(self, title):
"""Feed this the ledger on the first "{title} (1/N)" screen and it will read through,
collect the multi-part value, and return it. Throws ValueError if there aren't screens 1/N
through N/N. Leaves the ledger on the final (N/N) screen."""
text = self.curr()
disp_n = re.search("^" + re.escape(title) + r" \(1/(\d+)\)$", text[0])
if not disp_n:
raise ValueError(f"Did not match a multi-screen {title} value: {text}")
disp_n = int(disp_n[1])
full_value = "".join(text[1:])
i = 1
while i < disp_n:
self.right()
i += 1
text = self.curr()
expected = f"{title} ({i}/{disp_n})"
if text[0] != expected:
raise ValueError(
f"Unexpected multi-screen value: expected {expected}, got {text[0]}"
)
full_value += "".join(text[1:])
return full_value
def _detect_buggy_crap(self):
"""Detects buggy speculos inability to detect capital S's on the Nano X screen. This should
be called when the device is on the main screen."""
assert self.curr()[0] == "OXEN wallet"
self.right()
self.both()
self.right(4)
buggy_s_re = re.compile("^(S?)elect Network$")
for t in self.curr():
m = buggy_s_re.search(t)
if m:
self.buggy_S = len(m[1]) == 0
self.right(3)
self.both()
break
else:
raise RuntimeError(
"Did not find S?elect Network; perhaps the device was not on the main screen?"
)
def buggy_crap(self, x):
if not self.buggy_S:
return x
if any(isinstance(x, t) for t in (int, float)):
return x
if isinstance(x, str):
return x.replace("S", "")
if isinstance(x, list):
return [self.buggy_crap(i) for i in x]
if isinstance(x, tuple):
return tuple(self.buggy_crap(i) for i in x)
if isinstance(x, dict):
return {self.buggy_crap(k): self.buggy_crap(v) for k, v in x.items()}
raise ValueError(f"Don't know how to bug-accomodate {type(x)}")

View File

@ -0,0 +1 @@
../network_tests/service_node_network.py

View File

@ -0,0 +1,36 @@
from expected import *
def test_init(net, mike, hal, ledger):
"""
Tests that the node fakenet got initialized properly, and that the wallet starts up and shows
the right address.
"""
# All nodes should be at the same height:
heights = [x.rpc("/get_height").json()["height"] for x in net.all_nodes]
height = max(heights)
assert heights == [height] * len(net.all_nodes)
assert mike.height(refresh=True) == height
assert mike.balances() > (0, 0)
assert hal.height(refresh=True) == height
assert hal.balances() == (0, 0)
address = hal.address()
def check_addr(_, m):
assert address.startswith(m[1][1]) and address.endswith(m[1][2])
check_interactions(
ledger,
MatchScreen([r"^OXEN wallet$", r"^(\w+)\.\.(\w+)$"], check_addr),
Do.both, # Hitting both on the main screen shows us the full address details
ExactScreen(["Regular address", "(fakenet)"]),
Do.right,
MatchMulti("Address", address),
Do.right,
ExactScreen(["Back"]),
Do.both,
MatchScreen([r"^OXEN wallet$", r"^(\w+)\.\.(\w+)$"], check_addr),
)

tests/ledger/test_ons.py Normal file (449 lines)
View File

@ -0,0 +1,449 @@
import pytest
from functools import partial
from utils import *
from expected import *
import daemons
ONS_BASE_FEE = 7
def test_ons_buy(net, mike, hal, ledger):
mike.transfer(hal, coins(10))
net.mine()
assert hal.balances(refresh=True) == coins(10, 10)
store_fee = [StoreFee() for _ in range(3)]
run_with_interactions(
ledger,
partial(
hal.buy_ons,
"session",
"testsession",
"05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
backup_owner="0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee[0], fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.left,
Do.both,
ExactScreen(["Processing ONS"]),
)
mike.transfer(hal, coins(10))
net.mine()
assert hal.balances(refresh=True) == balance(20 - store_fee[0].fee)
run_with_interactions(
ledger,
partial(hal.buy_ons, "wallet", "testwallet", mike.address()),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee[1], fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
mike.transfer(hal, coins(50))
net.mine()
assert hal.balances(refresh=True) == balance(70 - store_fee[0].fee - store_fee[1].fee)
run_with_interactions(
ledger,
partial(
hal.buy_ons,
"lokinet_10y",
"test.loki",
"yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({6*ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee[2], fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
net.mine()
assert hal.balances(refresh=True) == balance(70 - sum(s.fee for s in store_fee))
assert hal.get_ons() == [
{
"type": "lokinet",
"name": "test.loki",
"hashed": "onTp6G7+2UEwBMEPjK149gY5phWt6SbhgkQYD5DBMXU=",
"value": "yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
"owner": hal.address(),
},
{
"type": "session",
"name": "testsession",
"hashed": "IcWqJAa2t5u4WMgDu6c6O1GvbI80r/GLUCVBZ8P/UlQ=",
"value": "05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"owner": hal.address(),
"backup_owner": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
},
{
"type": "wallet",
"name": "testwallet",
"hashed": "bFhh6FtiV16PT3twIllC8zyxU3E2sS0AilOkcv69WB8=",
"value": mike.address(),
"owner": hal.address(),
},
]
def test_ons_update(net, mike, hal, ledger):
mike.buy_ons(
"session",
"testsession",
"05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
backup_owner=hal.address(),
)
mike.buy_ons("wallet", "testwallet", mike.address(), backup_owner=hal.address())
mike.transfer(hal, coins(ONS_BASE_FEE + 1))
for _ in range(5):
mike.refresh()
mike.transfer(hal, coins(1))
net.mine(3)
net.mine(6)
mike.buy_ons(
"lokinet_10y",
"test.loki",
"yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
backup_owner=hal.address(),
)
net.mine(1)
hal.refresh()
run_with_interactions(
ledger,
partial(
hal.buy_ons,
"session",
"another",
"05aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
),
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", rf"^{ONS_BASE_FEE}\.\d{{1,9}}$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
net.mine(1)
# ONS has a bug where you can't *clear* a backup owner, nor can you set both owner and
# backup_owner to yourself, so we stuff in this dummy backup_owner in lieu of being able to
# clear it:
no_backup = "0000000000000000000000000000000000000000000000000000000000000000"
run_with_interactions(
ledger,
partial(
hal.update_ons,
"session",
"testsession",
value="05eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee",
owner=hal.address(),
backup_owner=no_backup,
),
ExactScreen(["Confirm Oxen", "Name Service TX"]),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", r"^0\.\d{1,9}$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
run_with_interactions(
ledger,
partial(
hal.update_ons,
"wallet",
"testwallet",
value=hal.address(),
owner=hal.address(),
backup_owner=no_backup,
),
ExactScreen(["Confirm Oxen", "Name Service TX"]),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", r"^0\.\d{1,9}$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
run_with_interactions(
ledger,
partial(
hal.update_ons,
"lokinet",
"test.loki",
value="444444444444444444444444444444444444444444444444444o.loki",
),
ExactScreen(["Confirm Oxen", "Name Service TX"]),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", r"^0\.\d{1,9}$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
run_with_interactions(
ledger,
partial(
hal.update_ons,
"session",
"another",
value="051234123412341234123412341234123412341234123412341234123412341234",
backup_owner="2222333322223333222233332222333322223333222233332222333322223333",
),
ExactScreen(["Confirm Oxen", "Name Service TX"]),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", r"^0\.\d{1,9}$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
net.mine(1)
hal.refresh()
assert hal.get_ons() == [
{
"type": "lokinet",
"name": "test.loki",
"hashed": "onTp6G7+2UEwBMEPjK149gY5phWt6SbhgkQYD5DBMXU=",
"value": "444444444444444444444444444444444444444444444444444o.loki",
"owner": mike.address(),
"backup_owner": hal.address(),
},
{
"type": "session",
"name": "another",
"hashed": "ZvuFxErXKyzGIPhiXjlxOLADdwaG/APS6AH+Qq4Bw0o=",
"value": "051234123412341234123412341234123412341234123412341234123412341234",
"owner": hal.address(),
"backup_owner": "2222333322223333222233332222333322223333222233332222333322223333",
},
{
"type": "session",
"name": "testsession",
"hashed": "IcWqJAa2t5u4WMgDu6c6O1GvbI80r/GLUCVBZ8P/UlQ=",
"value": "05eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee",
"owner": hal.address(),
},
{
"type": "wallet",
"name": "testwallet",
"hashed": "bFhh6FtiV16PT3twIllC8zyxU3E2sS0AilOkcv69WB8=",
"value": hal.address(),
"owner": hal.address(),
},
]
def test_ons_renew(net, mike, hal, ledger):
for _ in range(5):
mike.transfer(hal, coins(50))
net.mine(1)
net.mine(9)
bal = 250
assert hal.balances(refresh=True) == balance(bal)
store_fee = StoreFee()
run_with_interactions(
ledger,
partial(
hal.buy_ons,
"lokinet_2y",
"test.loki",
"yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({2*ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee, fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
bal -= store_fee.fee
net.mine(1)
reg_height = net.nodes[0].height() - 1
# On regtest our 1/2/5/10-year expiries become 2/4/10/20 *blocks* for expiry testing purposes
exp_height = reg_height + 4
assert hal.get_ons(include_height=True) == [
{
"type": "lokinet",
"name": "test.loki",
"hashed": "onTp6G7+2UEwBMEPjK149gY5phWt6SbhgkQYD5DBMXU=",
"value": "yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
"owner": hal.address(),
"update_height": reg_height,
"expiration_height": exp_height,
}
]
run_with_interactions(
ledger,
partial(hal.renew_ons, "lokinet_5y", "test.loki"),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({4*ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee, fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
bal -= store_fee.fee
net.mine(1)
hal.refresh()
run_with_interactions(
ledger,
partial(hal.renew_ons, "lokinet", "test.loki"),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee, fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
bal -= store_fee.fee
net.mine(2)
assert hal.get_ons(include_height=True) == [
{
"type": "lokinet",
"name": "test.loki",
"hashed": "onTp6G7+2UEwBMEPjK149gY5phWt6SbhgkQYD5DBMXU=",
"value": "yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
"owner": hal.address(),
"update_height": reg_height + 2,
"expiration_height": exp_height + 10 + 2,
}
]
run_with_interactions(
ledger,
partial(hal.renew_ons, "lokinet_10y", "test.loki"),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({6*ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee, fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing ONS"]),
)
net.mine(10)
bal -= store_fee.fee
assert hal.balances(refresh=True) == balance(bal)
assert hal.get_ons(include_height=True) == [
{
"type": "lokinet",
"name": "test.loki",
"hashed": "onTp6G7+2UEwBMEPjK149gY5phWt6SbhgkQYD5DBMXU=",
"value": "yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
"owner": hal.address(),
"update_height": reg_height + 4,
"expiration_height": exp_height + 10 + 2 + 20,
}
]
def test_ons_reject(net, mike, hal, ledger):
mike.transfer(hal, coins(100))
net.mine(10)
assert hal.balances(refresh=True) == balance(100)
with pytest.raises(RuntimeError, match=r'.*Fee denied on device\.$'):
run_with_interactions(
ledger,
partial(
hal.buy_ons,
"lokinet_10y",
"test.loki",
"yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
),
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", rf"^({6*ONS_BASE_FEE}\.\d{{1,9}})$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.both,
)
store_fee = StoreFee()
run_with_interactions(
ledger,
partial(
hal.buy_ons,
"lokinet_10y",
"test.loki",
"yoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyoyo.loki",
),
ExactScreen(["Processing ONS"]),
MatchScreen(
[r"^Confirm ONS Fee$", rf"^({6*ONS_BASE_FEE}\.\d{{1,9}})$"], store_fee, fail_index=1
),
Do.right,
ExactScreen(["Accept"]),
Do.both,
)
net.mine(10)
hal.refresh()
with pytest.raises(RuntimeError, match=r'.*Fee denied on device\.$'):
run_with_interactions(
ledger,
partial(hal.renew_ons, "lokinet_5y", "test.loki"),
ExactScreen(["Processing ONS"]),
MatchScreen([r"^Confirm ONS Fee$", rf"^({4*ONS_BASE_FEE}\.\d{{1,9}})$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.both,
)
assert hal.balances(refresh=True) == balance(100 - store_fee.fee)
net.mine(1)
assert hal.balances(refresh=True) == balance(100 - store_fee.fee)

tests/ledger/test_sn.py Normal file (159 lines)
View File

@ -0,0 +1,159 @@
import pytest
from functools import partial
from utils import *
from expected import *
def check_sn_rewards(net, hal, sn, starting_bal, reward):
net.mine(5) # 5 blocks until it starts earning rewards (testnet/fakenet)
hal_bal = hal.balances(refresh=True)
batch_offset = None
assert hal_bal == coins(starting_bal, 0)
# We don't know where our batch payment occurs yet, but let's look for it:
for i in range(20):
net.mine(1)
if hal.balances(refresh=True)[0] > coins(starting_bal):
batch_offset = sn.height() % 20
break
assert batch_offset is not None
hal_bal = hal.balances()
net.mine(19)
assert hal.balances(refresh=True)[0] == hal_bal[0]
net.mine(1) # Should be our batch height
assert hal.balances(refresh=True)[0] == hal_bal[0] + coins(20 * reward)
def test_sn_register(net, mike, hal, ledger, sn):
mike.transfer(hal, coins(101))
net.mine()
assert hal.balances(refresh=True) == coins(101, 101)
store_fee = StoreFee()
run_with_interactions(
ledger,
partial(hal.register_sn, sn),
ExactScreen(["Processing Stake"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Confirm Stake", "100.0"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.left,
Do.both,
ExactScreen(["Processing Stake"]),
)
# We are half the SN network, so get half of the block reward per block:
reward = 0.5 * 16.5
check_sn_rewards(net, hal, sn, 101 - store_fee.fee, reward)
def test_sn_stake(net, mike, alice, hal, ledger, sn):
mike.multi_transfer([hal, alice], coins(13.02, 87.02))
net.mine()
assert hal.balances(refresh=True) == coins(13.02, 13.02)
assert alice.balances(refresh=True) == coins(87.02, 87.02)
alice.register_sn(sn, stake=coins(87))
net.mine(1)
store_fee = StoreFee()
run_with_interactions(
ledger,
partial(hal.stake_sn, sn, coins(13)),
ExactScreen(["Processing Stake"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Confirm Stake", "13.0"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.left,
Do.both,
ExactScreen(["Processing Stake"]),
)
# Our SN is 1 of 2 registered, so we get 50% of the 16.5 reward; 10% is removed for the operator
# fee, then hal gets 13/100 of the rest:
reward = 0.5 * 16.5 * 0.9 * 0.13
check_sn_rewards(net, hal, sn, 13.02 - store_fee.fee, reward)
def test_sn_reject(net, mike, hal, ledger, sn):
mike.transfer(hal, coins(101))
net.mine()
assert hal.balances(refresh=True) == coins(101, 101)
store_fee = StoreFee()
with pytest.raises(RuntimeError, match=r"Fee denied on device\.$"):
run_with_interactions(
ledger,
partial(hal.register_sn, sn),
ExactScreen(["Processing Stake"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], store_fee, fail_index=1),
Do.right,
Do.right,
ExactScreen(["Reject"]),
Do.both,
)
with pytest.raises(RuntimeError, match=r"Transaction denied on device\.$"):
run_with_interactions(
ledger,
partial(hal.register_sn, sn),
ExactScreen(["Processing Stake"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Confirm Stake", "100.0"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.both,
)
def test_sn_unstake(net, mike, hal, ledger, sn):
# Do the full registration:
test_sn_register(net, mike, hal, ledger, sn)
run_with_interactions(
ledger,
partial(hal.unstake_sn, sn),
ExactScreen(["Confirm Service", "Node Unlock"]),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.left,
Do.both,
)
# A fakechain unlock takes 30 blocks; add another 20 on top just to be sure we've received the
# last batch reward:
net.mine(30 + 20)
hal_bal = hal.balances(refresh=True)
net.mine(20)
assert hal.balances(refresh=True) == hal_bal

View File

@ -0,0 +1,337 @@
import pytest
from functools import partial
from utils import *
from expected import *
import daemons
def test_receive(net, mike, hal):
mike.transfer(hal, coins(100))
net.mine(blocks=2)
assert hal.balances(refresh=True) == coins(100, 0)
net.mine(blocks=7)
assert hal.balances(refresh=True) == coins(100, 0)
net.mine(blocks=1)
assert hal.balances(refresh=True) == coins(100, 100)
def test_send(net, mike, alice, hal, ledger):
mike.transfer(hal, coins(100))
net.mine()
hal.refresh()
store_fee = StoreFee()
run_with_interactions(
ledger,
partial(hal.transfer, alice, coins(42.5)),
ExactScreen(["Processing TX"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.left,
Do.both,
ExactScreen(["Confirm Amount", "42.5"], fail_index=1),
Do.right,
MatchMulti("Recipient", alice.address()),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.right, # This loops back around to the amount:
ExactScreen(["Confirm Amount", "42.5"]),
Do.left,
Do.left,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Processing TX"]),
)
net.mine(1)
remaining = coins(100 - 42.5 - store_fee.fee)
hal_bal = hal.balances(refresh=True)
assert hal_bal[0] == remaining
assert hal_bal[1] < remaining
assert alice.balances(refresh=True) == coins(42.5, 0)
net.mine(9)
assert hal.balances(refresh=True) == (remaining, remaining)
assert alice.balances(refresh=True) == coins(42.5, 42.5)
def test_multisend(net, mike, alice, bob, hal, ledger):
mike.transfer(hal, coins(105))
net.mine()
assert hal.balances(refresh=True) == coins(105, 105)
store_fee = StoreFee()
recipient_addrs = []
def store_addr(val):
nonlocal recipient_addrs
recipient_addrs.append(val)
recipient_amounts = []
def store_amount(_, m):
nonlocal recipient_amounts
recipient_amounts.append(m[1][1])
recipient_expected = ledger.buggy_crap(
[
(alice.address(), "18.0"),
(bob.address(), "19.0"),
(alice.address(), "20.0"),
(alice.address(), "21.0"),
(hal.address(), "22.0"),
]
)
recipient_expected.sort()
hal.timeout = 120 # creating this tx with the ledger takes ages
run_with_interactions(
ledger,
partial(hal.multi_transfer, [alice, bob, alice, alice, hal], coins(18, 19, 20, 21, 22)),
ExactScreen(["Processing TX"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.\d{1,9})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
*(
cmds
for i in range(len(recipient_expected))
for cmds in [
MatchScreen([r"^Confirm Amount$", r"^(\d+\.\d+)$"], store_amount, fail_index=1),
Do.right,
MatchMulti("Recipient", None, callback=store_addr),
Do.right,
ExactScreen(["Accept"]),
Do.both,
]
),
ExactScreen(["Processing TX"]),
timeout=120,
)
recipient_got = list(zip(recipient_addrs, recipient_amounts))
recipient_got.sort()
assert recipient_expected == recipient_got
net.mine(1)
remaining = coins(105 - 100 - store_fee.fee + 22)
hal_bal = hal.balances(refresh=True)
assert hal_bal[0] == remaining
assert hal_bal[1] < remaining
assert alice.balances(refresh=True) == coins(18 + 20 + 21, 0)
assert bob.balances(refresh=True) == coins(19, 0)
net.mine(9)
assert hal.balances(refresh=True) == (remaining,) * 2
assert alice.balances(refresh=True) == balance(18 + 20 + 21)
assert bob.balances(refresh=True) == balance(19)
def test_reject_send(net, mike, alice, hal, ledger):
mike.transfer(hal, coins(100))
net.mine()
hal.refresh()
with pytest.raises(daemons.TransferFailed):
run_with_interactions(
ledger,
partial(hal.transfer, alice, coins(42.5)),
ExactScreen(["Processing TX"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.both,
)
with pytest.raises(daemons.TransferFailed):
run_with_interactions(
ledger,
partial(hal.transfer, alice, coins(42.5)),
ExactScreen(["Processing TX"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Confirm Amount", "42.5"], fail_index=1),
Do.right,
MatchMulti("Recipient", alice.address()),
Do.right,
ExactScreen(["Accept"]),
Do.right,
ExactScreen(["Reject"]),
Do.both,
)
store_fee = StoreFee()
run_with_interactions(
ledger,
partial(hal.transfer, alice, coins(42.5)),
ExactScreen(["Processing TX"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.01\d{1,7})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
ExactScreen(["Confirm Amount", "42.5"], fail_index=1),
Do.right,
MatchMulti("Recipient", alice.address()),
Do.right,
ExactScreen(["Accept"]),
Do.both,
)
net.mine(10)
assert hal.balances(refresh=True) == balance(100 - 42.5 - store_fee.fee)
def test_subaddr_receive(net, mike, hal):
hal.json_rpc("create_address", {"count": 3})
subaddrs = [hal.get_subaddress(0, i) for i in range(1, 4)]
mike.multi_transfer(subaddrs, coins([5] * len(subaddrs)))
subaddr0 = "LQM2cdzDY311111111111111111111111111111111111111111111111111111111111111111111111111111116onhCC"
subaddrZ = "La3hdSoi9JWjpXCZedGfVQjpXCZedGfVQjpXCZedGfVQjpXCZedGfVQjpXCZedGfVQjpXCZedGfVQjpXCZedGfVQVrgyHVC"
for s in subaddrs:
assert subaddr0 <= s <= subaddrZ
assert len(set(subaddrs)) == len(subaddrs)
net.mine(blocks=2)
assert hal.balances(refresh=True) == coins(5 * len(subaddrs), 0)
net.mine(blocks=8)
assert hal.balances(refresh=True) == balance(5 * len(subaddrs))
subaccounts = []
for i in range(3):
r = hal.json_rpc("create_account").json()["result"]
assert r["account_index"] == i + 1
assert subaddr0 <= r["address"] <= subaddrZ
subaccounts.append(r["address"])
hal.json_rpc("create_address", {"account_index": i + 1, "count": 1})
assert len(set(subaccounts + subaddrs)) == len(subaccounts) + len(subaddrs)
for i in range(3):
assert subaccounts[i] == hal.get_subaddress(i + 1, 0)
subaddrs.append(hal.get_subaddress(i + 1, 1))
for s in subaddrs:
assert subaddr0 <= s <= subaddrZ
assert len(set(subaccounts + subaddrs)) == len(subaccounts) + len(subaddrs)
assert len(subaccounts) + len(subaddrs) == 9
mike.multi_transfer(
subaddrs + subaccounts, coins(list(range(1, 1 + len(subaddrs) + len(subaccounts))))
)
net.mine()
hal.refresh()
balances = []
for i in range(len(subaccounts) + 1):
r = hal.json_rpc(
"get_balance", {"account_index": i, "subaddress_indices": list(range(10))}
).json()["result"]
balances.append(
(
r["balance"],
r["unlocked_balance"],
{x["address"]: x["unlocked_balance"] for x in r["per_subaddress"]},
)
)
assert balances == [
(coins(21), coins(21), {subaddrs[i]: coins(5 + i + 1) for i in range(3)}),
(coins(11), coins(11), {subaddrs[3]: coins(4), subaccounts[0]: coins(7)}),
(coins(13), coins(13), {subaddrs[4]: coins(5), subaccounts[1]: coins(8)}),
(coins(15), coins(15), {subaddrs[5]: coins(6), subaccounts[2]: coins(9)}),
]
def test_subaddr_send(net, mike, alice, bob, hal, ledger):
mike.transfer(hal, coins(100))
net.mine()
alice.json_rpc("create_address", {"count": 2})
bob.json_rpc("create_address", {"count": 2})
hal.refresh()
mike_bal = mike.balances(refresh=True)
to = [
addrs
for w in (alice, bob)
for addrs in (w.address(), w.get_subaddress(0, 1), w.get_subaddress(0, 2))
]
assert len(to) == 6
amounts = list(range(1, len(to) + 1))
store_fee = StoreFee()
recipient_addrs = []
def store_addr(val):
nonlocal recipient_addrs
recipient_addrs.append(val)
recipient_amounts = []
def store_amount(_, m):
nonlocal recipient_amounts
recipient_amounts.append(m[1][1])
recipient_expected = ledger.buggy_crap([(addr, f"{amt}.0") for addr, amt in zip(to, amounts)])
recipient_expected.sort()
hal.timeout = 300 # creating this tx with the ledger takes ages
run_with_interactions(
ledger,
partial(hal.multi_transfer, to, [coins(a) for a in amounts]),
ExactScreen(["Processing TX"]),
MatchScreen([r"^Confirm Fee$", r"^(0\.\d{1,9})$"], store_fee, fail_index=1),
Do.right,
ExactScreen(["Accept"]),
Do.both,
*(
cmds
for i in range(len(recipient_expected))
for cmds in [
MatchScreen([r"^Confirm Amount$", r"^(\d+\.\d+)$"], store_amount, fail_index=1),
Do.right,
MatchMulti("Recipient", None, callback=store_addr),
Do.right,
ExactScreen(["Accept"]),
Do.both,
]
),
ExactScreen(["Processing TX"]),
timeout=180,
)
assert 0.03 < store_fee.fee < 1
recipient_got = sorted(zip(recipient_addrs, recipient_amounts))
assert recipient_expected == recipient_got
vprint("recipients look good, checking final balances")
net.mine()
assert alice.balances(refresh=True) == coins(6, 6)
assert bob.balances(refresh=True) == coins(15, 15)
assert hal.balances(refresh=True) == balance(100 - sum(amounts) - store_fee.fee)

tests/ledger/utils.py Normal file (14 lines)
View File

@ -0,0 +1,14 @@
from service_node_network import coins, vprint
def balance(c):
"""Shortcut for coins(c,c), particularly useful when c is complex"""
return coins(c, c)
class StoreFee:
def __init__(self):
self.fee = None
def __call__(self, _, m):
self.fee = float(m[1][1])
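For reference, a sketch of how these helpers combine with the matchers in expected.py, mirroring the pattern used throughout the tests above (all names come from this diff):

# Assumes `from expected import MatchScreen` and use with run_with_interactions,
# as in the test files. StoreFee records the fee captured by the second regex
# group; balance(x) is shorthand for coins(x, x).
fee = StoreFee()
fee_screen = MatchScreen([r"^Confirm Fee$", r"^(0\.\d{1,9})$"], fee, fail_index=1)
# ...after run_with_interactions(...) has matched fee_screen, fee.fee holds the float:
expected_balance = balance(100 - 42.5 - fee.fee)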

tests/ledger/vprint.py Symbolic link (1 line)
View File

@ -0,0 +1 @@
../network_tests/vprint.py

View File

@ -1,11 +1,180 @@
#!/usr/bin/python3
import pytest
from service_node_network import net, alice, bob, mike, chuck, chuck_double_spend
import os.path
import service_node_network
def pytest_addoption(parser):
parser.addoption("--binary-dir", default="../../build/bin", action="store")
@pytest.fixture(scope="session")
def binary_dir(request):
return request.config.getoption('--binary-dir')
binpath = request.config.getoption("--binary-dir")
for exe in ("oxend", "oxen-wallet-rpc"):
b = f"{binpath}/{exe}"
if not os.path.exists(b):
raise FileNotFoundError(
b,
f"Required executable ({b}) not found; build the project, or specify an alternate build/bin dir with --binary-dir",
)
return binpath
@pytest.fixture
def net(pytestconfig, tmp_path, binary_dir):
return service_node_network.sn_net(pytestconfig, tmp_path, binary_dir)
# Shortcuts for accessing the named wallets
@pytest.fixture
def alice(net):
return net.alice
@pytest.fixture
def bob(net):
return net.bob
@pytest.fixture
def mike(net):
return net.mike
@pytest.fixture
def chuck(net):
"""
`chuck` is the wallet of a potential attacker, with some extra add-ons. The main `chuck` wallet
is connected to one of the three network nodes (like alice or bob), and starts out empty.
Chuck also has a second copy of the same wallet, `chuck.hidden`, which is connected to his own
private node, `chuck.hidden.node`. This node is connected to the network exclusively through a
second node that Chuck runs, `chuck.bridge`. This allows chuck to disconnect from the network
by stopping the bridge node and reconnect by restarting it. Note that the bridge and hidden
nodes will not have received proofs (and so can't be used to submit blinks).
"""
chuck = Wallet(
node=net.nodes[0],
name="Chuck",
rpc_wallet=net.binpath + "/oxen-wallet-rpc",
datadir=net.datadir,
)
chuck.ready(wallet="chuck")
hidden_node = Daemon(oxend=net.binpath + "/oxend", datadir=net.datadir)
bridge_node = Daemon(oxend=net.binpath + "/oxend", datadir=net.datadir)
for x in (4, 7):
bridge_node.add_peer(net.all_nodes[x])
bridge_node.add_peer(hidden_node)
hidden_node.add_peer(bridge_node)
vprint(
"Starting new chuck oxend bridge node with RPC on {}:{}".format(
bridge_node.listen_ip, bridge_node.rpc_port
)
)
bridge_node.start()
bridge_node.wait_for_json_rpc("get_info")
net.sync(extra_nodes=[bridge_node], extra_wallets=[chuck])
vprint(
"Starting new chuck oxend hidden node with RPC on {}:{}".format(
hidden_node.listen_ip, hidden_node.rpc_port
)
)
hidden_node.start()
hidden_node.wait_for_json_rpc("get_info")
net.sync(extra_nodes=[hidden_node, bridge_node], extra_wallets=[chuck])
vprint("Done syncing chuck nodes")
# RPC wallet doesn't provide a way to import from a key or mnemonic, so we have to stop the rpc
# wallet then copy the underlying wallet file.
chuck.refresh()
chuck.stop()
chuck.hidden = Wallet(
node=hidden_node,
name="Chuck (hidden)",
rpc_wallet=net.binpath + "/oxen-wallet-rpc",
datadir=net.datadir,
)
import shutil
import os
wallet_base = chuck.walletdir + "/chuck"
assert os.path.exists(wallet_base)
assert os.path.exists(wallet_base + ".keys")
os.makedirs(chuck.hidden.walletdir, exist_ok=True)
shutil.copy(wallet_base, chuck.hidden.walletdir + "/chuck2")
shutil.copy(wallet_base + ".keys", chuck.hidden.walletdir + "/chuck2.keys")
# Restart the regular wallet and the newly copied hidden wallet
chuck.ready(wallet="chuck", existing=True)
chuck.hidden.ready(wallet="chuck2", existing=True)
chuck.refresh()
chuck.hidden.refresh()
assert chuck.address() == chuck.hidden.address()
chuck.bridge = bridge_node
return chuck
@pytest.fixture
def chuck_double_spend(net, alice, mike, chuck):
"""
Importing this fixture (along with `chuck` itself!) extends the chuck setup to transfer 100
coins to chuck, mine them to confirmation, then stop his bridge node to double-spend those
funds. This consists of a blink tx of 95 (sent to alice) on the connected network and a
conflicting regular tx (sent to himself) submitted to the mempool of his local hidden (and now
disconnected) node.
The fixture value is a tuple of the submitted tx details as returned by the rpc wallet,
`(blinked_tx, hidden_tx)`.
"""
assert chuck.balances() == (0, 0)
mike.transfer(chuck, coins(100))
net.mine()
net.sync(extra_nodes=[chuck.bridge, chuck.hidden.node], extra_wallets=[chuck, chuck.hidden])
assert chuck.balances() == coins(100, 100)
assert chuck.hidden.balances() == coins(100, 100)
# Now we disconnect chuck's bridge node, which will isolate the hidden node.
chuck.bridge.stop()
tx_blink = chuck.transfer(alice, coins(95), priority=5)
assert len(tx_blink["tx_hash_list"]) == 1
blink_hash = tx_blink["tx_hash_list"][0]
time.sleep(0.5) # allow blink to propagate
# ... but it shouldn't have propagated here because this is disconnected, so we can submit a
# conflicting tx:
tx_hidden = chuck.hidden.transfer(chuck, coins(95), priority=1)
assert len(tx_hidden["tx_hash_list"]) == 1
hidden_hash = tx_hidden["tx_hash_list"][0]
assert hidden_hash != blink_hash
vprint("double-spend txs: blink: {}, hidden: {}".format(blink_hash, hidden_hash))
net.sync()
alice.refresh()
assert alice.balances() == coins(95, 0)
mike_txpool = [
x["id_hash"] for x in mike.node.rpc("/get_transaction_pool").json()["transactions"]
]
assert mike_txpool == [blink_hash]
hidden_txpool = [
x["id_hash"] for x in chuck.hidden.node.rpc("/get_transaction_pool").json()["transactions"]
]
assert hidden_txpool == [hidden_hash]
return (tx_blink, tx_hidden)
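# Illustrative sketch (not part of this commit): a hypothetical test consuming this fixture
# can unpack the tuple and rely on the state the fixture has already asserted.
def test_double_spend_txs(net, alice, chuck, chuck_double_spend):
    tx_blink, tx_hidden = chuck_double_spend
    assert tx_blink["tx_hash_list"][0] != tx_hidden["tx_hash_list"][0]
    assert alice.balances() == coins(95, 0)  # the blink side has already reached alice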

View File

@ -6,13 +6,25 @@ import requests
import subprocess
import time
def coins(*args):
if len(args) != 1:
return tuple(coins(x) for x in args)
x = args[0]
if type(x) in (tuple, list):
return type(x)(coins(i) for i in x)
return round(x * 1_000_000_000)
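# For example (illustrative): coins(5) == 5_000_000_000, coins(1, 2.5) == (1_000_000_000,
# 2_500_000_000), and coins([0.1, 0.2]) == [100_000_000, 200_000_000].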
# On linux we can pick a random 127.x.y.z IP which is highly likely to not have anything listening
# on it (so we make bind conflicts highly unlikely). On most other OSes we have to listen on
# 127.0.0.1 instead, so we pick a random starting port instead to try to minimize bind conflicts.
LISTEN_IP, NEXT_PORT = (
('127.' + '.'.join(str(random.randint(1, 254)) for _ in range(3)), 1100)
if sys.platform == 'linux' else
('127.0.0.1', random.randint(5000, 20000)))
("127." + ".".join(str(random.randint(1, 254)) for _ in range(3)), 1100)
if sys.platform == "linux"
else ("127.0.0.1", random.randint(5000, 20000))
)
def next_port():
global NEXT_PORT
@ -37,12 +49,11 @@ class RPCDaemon:
self.name = name
self.proc = None
self.terminated = False
self.timeout = 10 # subclass should override if needed
def __del__(self):
self.stop()
def terminate(self, repeat=False):
"""Sends a TERM signal if one hasn't already been sent (or even if it has, with
repeat=True). Does not wait for exit."""
@ -50,92 +61,103 @@ class RPCDaemon:
self.proc.terminate()
self.terminated = True
def start(self):
if self.proc and self.proc.poll() is None:
raise RuntimeError("Cannot start process that is already running!")
self.proc = subprocess.Popen(self.arguments(),
stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
self.proc = subprocess.Popen(
self.arguments(),
stdin=subprocess.DEVNULL,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
self.terminated = False
def stop(self):
def stop(self, timeout=None):
"""Tries stopping with a term at first, then a kill if the term hasn't worked after 10s"""
if self.proc:
self.terminate()
timeout = timeout or self.timeout
try:
self.proc.wait(timeout=10)
self.proc.wait(timeout=timeout)
except subprocess.TimeoutExpired:
print("{} took more than 10s to exit, killing it".format(self.name))
print(f"{self.name} took more than {timeout}s to exit, killing it")
self.proc.kill()
self.proc = None
def arguments(self):
"""Returns the startup arguments; default is just self.args, but subclasses can override."""
return self.args
def json_rpc(self, method, params=None, *, timeout=10):
def json_rpc(self, method, params=None, *, timeout=None):
"""Sends a json_rpc request to the rpc port. Returns the response object."""
if not self.proc:
raise RuntimeError("Cannot make rpc request before calling start()")
json = {
"jsonrpc": "2.0",
"id": "0",
"method": method,
}
json = {"jsonrpc": "2.0", "id": "0", "method": method}
if params:
json["params"] = params
return requests.post('http://{}:{}/json_rpc'.format(self.listen_ip, self.rpc_port), json=json, timeout=timeout)
return requests.post(
f"http://{self.listen_ip}:{self.rpc_port}/json_rpc",
json=json,
timeout=timeout or self.timeout,
)
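# Illustrative usage (not part of this commit): the wrapper returns the raw requests
# Response, so callers typically chain .json() and index into "result", e.g.
#   pubkey = sn.json_rpc("get_service_keys").json()["result"]["service_node_pubkey"]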
def rpc(self, path, params=None, *, timeout=10):
def rpc(self, path, params=None, *, timeout=None):
"""Sends a non-json_rpc rpc request to the rpc port at path `path`, e.g. /get_info. Returns the response object."""
if not self.proc:
raise RuntimeError("Cannot make rpc request before calling start()")
return requests.post('http://{}:{}{}'.format(self.listen_ip, self.rpc_port, path), json=params, timeout=timeout)
return requests.post(
f"http://{self.listen_ip}:{self.rpc_port}{path}", json=params, timeout=timeout
)
def wait_for_json_rpc(self, method, params=None, *, timeout=None):
"""Calls `json_rpc', sleeping if it fails for up time `timeout' seconds (self.timeout if
omitted). Returns the response if it succeeds, raises the last exception if timeout is
reached. If the process exit, raises a RuntimeError"""
def wait_for_json_rpc(self, method, params=None, *, timeout=10):
"""Calls `json_rpc', sleeping if it fails for up time `timeout' seconds. Returns the
response if it succeeds, raises the last exception if timeout is reached. If the process
exit, raises a RuntimeError"""
until = time.time() + timeout
until = time.time() + (timeout or self.timeout)
now = time.time()
while now < until:
exit_status = self.proc.poll()
if exit_status is not None:
raise ProcessExited("{} exited ({}) while waiting for an RPC response".format(self.name, exit_status))
raise ProcessExited(
f"{self.name} exited ({exit_status}) while waiting for an RPC response"
)
timeout = until - now
try:
return self.json_rpc(method, params, timeout=timeout)
except:
if time.time() + .25 >= until:
if time.time() + 0.25 >= until:
raise
time.sleep(.25)
time.sleep(0.25)
now = time.time()
if now >= until:
raise
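# Illustrative usage (not part of this commit): wait for a freshly started daemon to begin
# answering RPC, re-raising the last failure if it never comes up in time:
#   daemon.start()
#   daemon.wait_for_json_rpc("get_info", timeout=30)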
class Daemon(RPCDaemon):
base_args = ('--dev-allow-local-ips', '--fixed-difficulty=1', '--regtest', '--non-interactive')
base_args = ("--dev-allow-local-ips", "--fixed-difficulty=1", "--regtest", "--non-interactive")
def __init__(self, *,
oxend='oxend',
listen_ip=None, p2p_port=None, rpc_port=None, zmq_port=None, qnet_port=None, ss_port=None,
name=None,
datadir=None,
service_node=False,
log_level=2,
peers=()):
def __init__(
self,
*,
oxend="oxend",
listen_ip=None,
p2p_port=None,
rpc_port=None,
zmq_port=None,
qnet_port=None,
ss_port=None,
name=None,
datadir=None,
service_node=False,
log_level=2,
peers=(),
):
self.rpc_port = rpc_port or next_port()
if name is None:
name = 'oxend@{}'.format(self.rpc_port)
name = f"oxend@{self.rpc_port}"
super().__init__(name)
self.listen_ip = listen_ip or LISTEN_IP
self.p2p_port = p2p_port or next_port()
@ -146,30 +168,29 @@ class Daemon(RPCDaemon):
self.args = [oxend] + list(self.__class__.base_args)
self.args += (
'--data-dir={}/oxen-{}-{}'.format(datadir or '.', self.listen_ip, self.rpc_port),
'--log-level={}'.format(log_level),
'--log-file=oxen.log'.format(self.listen_ip, self.p2p_port),
'--p2p-bind-ip={}'.format(self.listen_ip),
'--p2p-bind-port={}'.format(self.p2p_port),
'--rpc-admin={}:{}'.format(self.listen_ip, self.rpc_port),
'--quorumnet-port={}'.format(self.qnet_port),
)
f"--data-dir={datadir or '.'}/oxen-{self.listen_ip}-{self.rpc_port}",
f"--log-level={log_level}",
"--log-file=oxen.log",
f"--p2p-bind-ip={self.listen_ip}",
f"--p2p-bind-port={self.p2p_port}",
f"--rpc-admin={self.listen_ip}:{self.rpc_port}",
f"--quorumnet-port={self.qnet_port}",
)
for d in peers:
self.add_peer(d)
if service_node:
self.args += (
'--service-node',
'--service-node-public-ip={}'.format(self.listen_ip),
'--storage-server-port={}'.format(self.ss_port),
)
"--service-node",
f"--service-node-public-ip={self.listen_ip}",
f"--storage-server-port={self.ss_port}",
)
def arguments(self):
return self.args + [
'--add-exclusive-node={}:{}'.format(node.listen_ip, node.p2p_port) for node in self.peers]
f"--add-exclusive-node={node.listen_ip}:{node.p2p_port}" for node in self.peers
]
def ready(self):
"""Waits for the daemon to get ready, i.e. for it to start returning something to a
@ -178,89 +199,99 @@ class Daemon(RPCDaemon):
self.start()
self.wait_for_json_rpc("get_info")
def add_peer(self, node):
"""Adds a peer. Must be called before starting."""
if self.proc:
raise RuntimeError("add_peer needs to be called before start()")
self.peers.append(node)
def remove_peer(self, node):
"""Removes a peer. Must be called before starting."""
if self.proc:
raise RuntimeError("remove_peer needs to be called before start()")
self.peers.remove(node)
def mine_blocks(self, num_blocks, wallet, *, slow=True):
a = wallet.address()
self.rpc('/start_mining', {
"miner_address": a,
"threads_count": 1,
"num_blocks": num_blocks,
"slow_mining": slow
});
self.rpc(
"/start_mining",
{"miner_address": a, "threads_count": 1, "num_blocks": num_blocks, "slow_mining": slow},
)
def sn_pubkey(self):
return self.json_rpc("get_service_keys").json()["result"]["service_node_pubkey"]
def height(self):
return self.rpc("/get_height").json()["height"]
def txpool_hashes(self):
return [x['id_hash'] for x in self.rpc("/get_transaction_pool").json()['transactions']]
return [x["id_hash"] for x in self.rpc("/get_transaction_pool").json()["transactions"]]
def ping(self, *, storage=True, lokinet=True):
"""Sends fake storage server and lokinet pings to the running oxend"""
if storage:
self.json_rpc("storage_server_ping", { "version_major": 9, "version_minor": 9, "version_patch": 9 })
self.json_rpc(
"storage_server_ping", {"version_major": 9, "version_minor": 9, "version_patch": 9}
)
if lokinet:
self.json_rpc("lokinet_ping", { "version": [9,9,9] })
self.json_rpc("lokinet_ping", {"version": [9, 9, 9]})
def p2p_resync(self):
"""Triggers a p2p resync to happen soon (i.e. at the next p2p idle loop)."""
self.json_rpc("test_trigger_p2p_resync")
class Wallet(RPCDaemon):
base_args = ('--disable-rpc-login', '--non-interactive', '--password','', '--regtest', '--disable-rpc-long-poll',
'--rpc-ssl=disabled', '--daemon-ssl=disabled')
base_args = (
"--disable-rpc-login",
"--non-interactive",
"--password",
"",
"--regtest",
"--disable-rpc-long-poll",
)
def __init__(
self,
node,
*,
rpc_wallet='oxen-wallet-rpc',
name=None,
datadir=None,
listen_ip=None,
rpc_port=None,
log_level=2):
self,
node,
*,
rpc_wallet="oxen-wallet-rpc",
name=None,
datadir=None,
listen_ip=None,
rpc_port=None,
ledger_api=None, # e.g. "http://localhost:5000"
ledger_apdu=None, # e.g. "localhost:1111"
log_level=2,
):
self.listen_ip = listen_ip or LISTEN_IP
self.rpc_port = rpc_port or next_port()
self.node = node
self.ledger_api = ledger_api
self.ledger_apdu = ledger_apdu
if bool(self.ledger_api) != bool(self.ledger_apdu):
raise RuntimeError("ledger_api/ledger_apdu are mutually dependent")
self.name = name or 'wallet@{}'.format(self.rpc_port)
self.name = name or f"wallet@{self.rpc_port}"
super().__init__(self.name)
self.walletdir = '{}/wallet-{}-{}'.format(datadir or '.', self.listen_ip, self.rpc_port)
self.timeout = 60 if self.ledger_api else 10
self.walletdir = f'{datadir or "."}/wallet-{self.listen_ip}-{self.rpc_port}'
self.args = [rpc_wallet] + list(self.__class__.base_args)
self.args += (
'--rpc-bind-ip={}'.format(self.listen_ip),
'--rpc-bind-port={}'.format(self.rpc_port),
'--log-level={}'.format(log_level),
'--log-file={}/log.txt'.format(self.walletdir),
'--daemon-address={}:{}'.format(node.listen_ip, node.rpc_port),
'--wallet-dir={}'.format(self.walletdir),
)
f"--rpc-bind-ip={self.listen_ip}",
f"--rpc-bind-port={self.rpc_port}",
f"--log-level={log_level}",
f"--log-file={self.walletdir}/log.txt",
f"--shared-ringdb-dir",
"",
f"--daemon-address={node.listen_ip}:{node.rpc_port}",
f"--wallet-dir={self.walletdir}",
)
self.wallet_address = None
def ready(self, wallet="wallet", existing=False):
"""Makes the wallet ready, waiting for it to start up and create a new wallet (or load an
existing one, if `existing`) within the rpc wallet. Calls `start()` first if it hasn't
@ -272,14 +303,26 @@ class Wallet(RPCDaemon):
if existing:
r = self.wait_for_json_rpc("open_wallet", {"filename": wallet, "password": ""})
else:
r = self.wait_for_json_rpc("create_wallet", {"filename": wallet, "password": "", "language": "English"})
if 'result' not in r.json():
raise RuntimeError("Cannot open or create wallet: {}".format(r['error'] if 'error' in r else 'Unexpected response: {}'.format(r)))
params = {"filename": wallet, "password": "", "language": "English"}
if self.ledger_api:
params["hardware_wallet"] = True
params["device_name"] = "LedgerTCP"
params["debug_reset"] = True
# These are fairly slow (~0.2s each) for the device to construct during
# initialization, so severely reduce them for testing:
params["subaddress_lookahead_major"] = 2
params["subaddress_lookahead_minor"] = 2
r = self.wait_for_json_rpc("create_wallet", params)
if "result" not in r.json():
raise RuntimeError(
"Cannot open or create wallet: {}".format(
r["error"] if "error" in r else f"Unexpected response: {r.json()}"
)
)
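# Illustrative sketch (not part of this commit): constructing a Ledger-backed test wallet;
# the host/port values are the placeholder examples from the constructor comments above.
#   hww = Wallet(
#       node=net.nodes[0],
#       name="Ledger",
#       rpc_wallet=net.binpath + "/oxen-wallet-rpc",
#       datadir=net.datadir,
#       ledger_api="http://localhost:5000",
#       ledger_apdu="localhost:1111",
#   )
#   hww.ready(wallet="ledger")  # creates the wallet on the device via the "LedgerTCP" device_name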
def refresh(self):
return self.json_rpc('refresh')
return self.json_rpc("refresh")
def address(self):
if not self.wallet_address:
@ -287,66 +330,222 @@ class Wallet(RPCDaemon):
return self.wallet_address
def get_subaddress(self, account, subaddr):
r = self.json_rpc(
"get_address", {"account_index": account, "address_index": [subaddr]}
).json()
if "result" not in r:
raise RuntimeError(f"Unable to retrieve subaddr {account}.{subaddr}: {r['error']}")
return r["result"]["addresses"][0]["address"]
def new_wallet(self):
self.wallet_address = None
r = self.wait_for_json_rpc("close_wallet")
if 'result' not in r.json():
raise RuntimeError("Cannot close current wallet: {}".format(r['error'] if 'error' in r else 'Unexpected response: {}'.format(r)))
if not hasattr(self, 'wallet_suffix'):
if "result" not in r.json():
raise RuntimeError(
"Cannot close current wallet: {}".format(
r["error"] if "error" in r else f"Unexpected response: {r.json()}"
)
)
if not hasattr(self, "wallet_suffix"):
self.wallet_suffix = 2
else:
self.wallet_suffix += 1
r = self.wait_for_json_rpc("create_wallet", {"filename": "{}_{}".format(self.wallet_filename, self.wallet_suffix), "password": "", "language": "English"})
if 'result' not in r.json():
raise RuntimeError("Cannot create wallet: {}".format(r['error'] if 'error' in r else 'Unexpected response: {}'.format(r)))
r = self.wait_for_json_rpc(
"create_wallet",
{
"filename": f"{self.wallet_filename}_{self.wallet_suffix}",
"password": "",
"language": "English",
},
)
if "result" not in r.json():
raise RuntimeError(
"Cannot create wallet: {}".format(
r["error"] if "error" in r else f"Unexpected response: {r.json()}"
)
)
def height(self, refresh=False):
"""Returns current wallet height. Can optionally refresh first."""
if refresh:
self.refresh()
return self.json_rpc("get_height").json()["result"]["height"]
def balances(self, refresh=False):
"""Returns (total, unlocked) balances. Can optionally refresh first."""
if refresh:
self.refresh()
b = self.json_rpc("get_balance").json()['result']
return (b['balance'], b['unlocked_balance'])
b = self.json_rpc("get_balance").json()["result"]
return (b["balance"], b["unlocked_balance"])
def transfer(self, to, amount=None, *, priority=None, sweep=False):
"""Attempts a transfer. Throws TransferFailed if it gets rejected by the daemon, otherwise
returns the 'result' key."""
if isinstance(to, Wallet):
to = to.address()
else:
assert isinstance(to, str)
if priority is None:
priority = 1
if sweep and not amount:
r = self.json_rpc("sweep_all", {"address": to.address(), "priority": priority})
r = self.json_rpc("sweep_all", {"address": to, "priority": priority})
elif amount and not sweep:
r = self.json_rpc("transfer_split", {"destinations": [{"address": to.address(), "amount": amount}], "priority": priority})
r = self.json_rpc(
"transfer_split",
{"destinations": [{"address": to, "amount": amount}], "priority": priority},
)
else:
raise RuntimeError("Wallet.transfer: either `sweep` or `amount` must be given")
r = r.json()
if 'error' in r:
raise TransferFailed("Transfer failed: {}".format(r['error']['message']), r)
return r['result']
if "error" in r:
raise TransferFailed(f"Transfer failed: {r['error']['message']}", r)
return r["result"]
def multi_transfer(self, recipients, amounts, *, priority=None):
"""Attempts a transfer to multiple recipients at once. Throws TransferFailed if it gets
rejected by the daemon, otherwise returns the 'result' key."""
assert 0 < len(recipients) == len(amounts)
for i in range(len(recipients)):
if isinstance(recipients[i], Wallet):
recipients[i] = recipients[i].address()
else:
assert isinstance(recipients[i], str)
if priority is None:
priority = 1
r = self.json_rpc(
"transfer_split",
{
"destinations": [{"address": r, "amount": a} for r, a in zip(recipients, amounts)],
"priority": priority,
},
)
r = r.json()
if "error" in r:
raise TransferFailed(f"Transfer failed: {r['error']['message']}", r)
return r["result"]
def find_transfers(self, txids, in_=True, pool=True, out=True, pending=False, failed=False):
transfers = self.json_rpc('get_transfers', {'in':in_, 'pool':pool, 'out':out, 'pending':pending, 'failed':failed }).json()['result']
transfers = self.json_rpc(
"get_transfers",
{"in": in_, "pool": pool, "out": out, "pending": pending, "failed": failed},
).json()["result"]
def find_tx(txid):
for type_, txs in transfers.items():
for tx in txs:
if tx['txid'] == txid:
if tx["txid"] == txid:
return tx
return [find_tx(txid) for txid in txids]
def register_sn(self, sn, stake=coins(100), fee=10):
r = sn.json_rpc(
"get_service_node_registration_cmd",
{
"operator_cut": "100" if stake == coins(100) else f"{fee}",
"contributions": [{"address": self.address(), "amount": stake}],
"staking_requirement": coins(100),
},
).json()
if "error" in r:
raise RuntimeError(f"Registration cmd generation failed: {r['error']['message']}")
cmd = r["result"]["registration_cmd"]
if cmd == "":
# everything about this command is dumb, including its error handling
raise RuntimeError(f"Registration cmd generation failed: {r['result']['status']}")
def register_sn(self, sn):
r = sn.json_rpc("get_service_node_registration_cmd", {
"operator_cut": "100",
"contributions": [{"address": self.address(), "amount": 100000000000}],
"staking_requirement": 100000000000
}).json()
if 'error' in r:
raise RuntimeError("Registration cmd generation failed: {}".format(r['error']['message']))
cmd = r['result']['registration_cmd']
r = self.json_rpc("register_service_node", {"register_service_node_str": cmd}).json()
if 'error' in r:
raise RuntimeError("Failed to submit service node registration tx: {}".format(r['error']['message']))
if "error" in r:
raise RuntimeError(
"Failed to submit service node registration tx: {}".format(r["error"]["message"])
)
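# Illustrative usage (not part of this commit): register with the default full operator
# stake, or with a partial stake and an operator fee (values here are arbitrary):
#   mike.register_sn(net.sns[0])
#   mike.register_sn(net.sns[1], stake=coins(60), fee=25)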
def stake_sn(self, sn, stake):
r = self.json_rpc(
"stake",
{"destination": self.address(), "amount": stake, "service_node_key": sn.sn_pubkey()},
).json()
if "error" in r:
raise RuntimeError(f"Failed to submit stake: {r['error']['message']}")
def unstake_sn(self, sn):
r = self.json_rpc("request_stake_unlock", {"service_node_key": sn.sn_pubkey()}).json()
if "error" in r:
raise RuntimeError(f"Failed to submit unstake: {r['error']['message']}")
if not r["result"]["unlocked"]:
raise RuntimeError(f"Failed to submit unstake: {r['result']['msg']}")
def buy_ons(self, onstype, name, value, *, owner=None, backup_owner=None):
if onstype not in (
"session",
"wallet",
"lokinet",
"lokinet_2y",
"lokinet_5y",
"lokinet_10y",
):
raise ValueError(f"Invalid ONS type '{onstype}'")
params = {
"type": onstype,
"owner": self.address() if owner is None else owner,
"name": name,
"value": value,
}
if backup_owner:
params["backup_owner"] = backup_owner
r = self.json_rpc("ons_buy_mapping", params).json()
if "error" in r:
raise RuntimeError(f"Failed to buy ONS: {r['error']['message']}")
return r
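# Illustrative usage (not part of this commit); the name and value strings are placeholders
# and not necessarily valid ONS values:
#   mike.buy_ons("session", "myname", "<session id value>", backup_owner=alice.address())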
def renew_ons(self, onstype, name):
if onstype not in ("lokinet", "lokinet_2y", "lokinet_5y", "lokinet_10y"):
raise ValueError(f"Invalid ONS renewal type '{onstype}'")
r = self.json_rpc("ons_renew_mapping", {"type": onstype, "name": name}).json()
if "error" in r:
raise RuntimeError(f"Failed to buy ONS: {r['error']['message']}")
return r
def update_ons(self, onstype, name, *, value=None, owner=None, backup_owner=None):
if onstype not in ("session", "wallet", "lokinet"):
raise ValueError(f"Invalid ONS update type '{onstype}'")
params = {"type": onstype, "name": name}
if value is not None:
params["value"] = value
if owner is not None:
params["owner"] = owner
if backup_owner is not None:
params["backup_owner"] = backup_owner
r = self.json_rpc("ons_update_mapping", params).json()
if "error" in r:
raise RuntimeError(f"Failed to buy ONS: {r['error']['message']}")
return r
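# Illustrative usage (not part of this commit); names here are placeholders:
#   mike.renew_ons("lokinet_2y", "mysite")    # lokinet names are the only renewable type
#   mike.update_ons("session", "myname", backup_owner=alice.address())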
def get_ons(self, *, include_txid=False, include_encrypted=False, include_height=False):
r = self.json_rpc("ons_known_names", {"decrypt": True}).json()
if "error" in r:
raise RuntimeError(f"Failed to buy ONS: {r['error']['message']}")
names = sorted(r["result"]["known_names"], key=lambda x: (x["type"], x["name"]))
if not include_txid:
for n in names:
del n["txid"]
if not include_encrypted:
for n in names:
del n["encrypted_value"]
if not include_height:
for n in names:
del n["update_height"]
n.pop("expiration_height", None)
return names
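# Illustrative usage (not part of this commit): fetch the wallet's decrypted ONS records,
# keeping the txid field for cross-referencing against the pool:
#   names = mike.get_ons(include_txid=True)
#   session_names = [n["name"] for n in names if n["type"] == "session"]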

View File

@ -1,5 +1,5 @@
# Provides a pytest fixture of a configured service node network with 20 service nodes, 3 regular
# nodes, and 3 wallets (each connected to a different node).
# Provides a configured service node network with 20 service nodes, 3 regular nodes, and 3 wallets
# (each connected to a different node).
#
# The 20 service nodes are registered, have mined enough to make the blink quorum active, and have
# sent uptime proofs to each other.
@ -12,26 +12,16 @@
# - alice and bob will have any existing funds transferred to mike but may still have tx history of
# previous tests. (The wallet-emptying sweep to mike, however, may not yet be confirmed).
#
# A fourth malicious wallet is available by importing the fixture `chuck`, which generates new
# wallets and nodes each time (see the fixture for details).
from daemons import Daemon, Wallet
from daemons import Daemon, Wallet, coins
import vprint as v
from vprint import vprint
import random
import time
from datetime import datetime
import uuid
import pytest
def coins(*args):
if len(args) != 1:
return tuple(coins(x) for x in args)
x = args[0]
if type(x) in (tuple, list):
return type(x)(coins(i) for i in x)
return round(x * 1000000000)
def wait_for(callback, timeout=10):
expires = time.time() + timeout
@ -43,39 +33,34 @@ def wait_for(callback, timeout=10):
pass
if time.time() >= expires:
raise RuntimeError("task timeout expired")
time.sleep(.25)
verbose = False
def vprint(*args, timestamp=True, **kwargs):
global verbose
if verbose:
if timestamp:
print(datetime.now(), end=" ")
print(*args, **kwargs)
time.sleep(0.25)
class SNNetwork:
def __init__(self, datadir, *, binpath='../../build/bin', sns=20, nodes=3):
def __init__(self, datadir, *, binpath, sns=20, nodes=3, unstaked_sns=0):
self.datadir = datadir
self.binpath = binpath
vprint("Using '{}' for data files and logs".format(datadir))
nodeopts = dict(oxend=self.binpath+'/oxend', datadir=datadir)
nodeopts = dict(oxend=self.binpath + "/oxend", datadir=datadir)
self.sns = [Daemon(service_node=True, **nodeopts) for _ in range(sns)]
self.unstaked_sns = [Daemon(service_node=True, **nodeopts) for _ in range(unstaked_sns)]
self.nodes = [Daemon(**nodeopts) for _ in range(nodes)]
self.all_nodes = self.sns + self.nodes
self.all_nodes = self.sns + self.unstaked_sns + self.nodes
self.wallets = []
for name in ('Alice', 'Bob', 'Mike'):
self.wallets.append(Wallet(
node=self.nodes[len(self.wallets) % len(self.nodes)],
name=name,
rpc_wallet=self.binpath+'/oxen-wallet-rpc',
datadir=datadir))
for name in ("Alice", "Bob", "Mike"):
self.wallets.append(
Wallet(
node=self.nodes[len(self.wallets) % len(self.nodes)],
name=name,
rpc_wallet=self.binpath + "/oxen-wallet-rpc",
datadir=datadir,
)
)
self.alice, self.bob, self.mike = self.wallets
@ -86,12 +71,18 @@ class SNNetwork:
if i != k:
self.all_nodes[i].add_peer(self.all_nodes[k])
vprint("Starting new oxend service nodes with RPC on {} ports".format(self.sns[0].listen_ip), end="")
for sn in self.sns:
vprint(
"Starting new oxend service nodes with RPC on {} ports".format(self.sns[0].listen_ip),
end="",
)
for sn in self.sns + self.unstaked_sns:
vprint(" {}".format(sn.rpc_port), end="", flush=True, timestamp=False)
sn.start()
vprint(timestamp=False)
vprint("Starting new regular oxend nodes with RPC on {} ports".format(self.nodes[0].listen_ip), end="")
vprint(
"Starting new regular oxend nodes with RPC on {} ports".format(self.nodes[0].listen_ip),
end="",
)
for d in self.nodes:
vprint(" {}".format(d.rpc_port), end="", flush=True, timestamp=False)
d.start()
@ -114,28 +105,29 @@ class SNNetwork:
for w in self.wallets:
w.wait_for_json_rpc("refresh")
# Mine some blocks; we need 100 per SN registration, and we can nearly 600 on fakenet before
# it hits HF16 and kills mining rewards. This lets us submit the first 5 SN registrations a
# SN (at height 40, which is the earliest we can submit them without getting an occasional
# Mine some blocks; we need 100 per SN registration, and we can mine ~473 on fakenet before
# it hits HF16 and kills mining rewards. This lets us submit the first 4 SN registrations
# (at height 50, which is the earliest we can submit them without getting an occasional
# spurious "Not enough outputs to use" error).
# to unlock and the rest to have enough unlocked outputs for mixins), then more some more to
# earn SN rewards. We need 100 per SN registration, and each mined block gives us an input
# of 18.9, which means each registration requires 6 inputs. Thus we need a bare minimum of
# 6(N-5) blocks, plus the 30 lock time on coinbase TXes = 6N more blocks (after the initial
# 5 registrations).
# After this, we need to mine some more to earn SN rewards: we need 100 per SN registration, and each
# mined block gives us an input of 18.9, which means each registration requires 6 inputs.
# Thus we need a bare minimum of 6(N-5) blocks, plus the 30 lock time on coinbase TXes = 6N
# more blocks (after the initial 4 registrations).
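# Worked example of that arithmetic (illustrative, not part of this commit): with the
# default 20 SNs, ceil(100 / 18.9) = 6 inputs per registration, so the second round below
# mines 6 * (20 - 4) = 96 blocks on top of the initial 50.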
self.mine(50)
self.print_wallet_balances()
vprint("Submitting first round of service node registrations: ", end="", flush=True)
for sn in self.sns[0:5]:
for sn in self.sns[0:4]:
self.mike.register_sn(sn)
vprint(".", end="", flush=True, timestamp=False)
vprint(timestamp=False)
if len(self.sns) > 5:
if len(self.sns) > 4:
vprint("Going back to mining", flush=True)
self.mine(6*len(self.sns))
self.mine(6 * (len(self.sns) - 4))
self.print_wallet_balances()
vprint("Submitting more service node registrations: ", end="", flush=True)
for sn in self.sns[5:]:
for sn in self.sns[4:]:
self.mike.register_sn(sn)
vprint(".", end="", flush=True, timestamp=False)
vprint(timestamp=False)
@ -143,35 +135,43 @@ class SNNetwork:
self.print_wallet_balances()
vprint("Mining 40 blocks (registrations + blink quorum lag) and waiting for nodes to sync")
self.sync_nodes(self.mine(40))
if len(self.sns) > 4:
vprint(
"Mining 40 blocks (registrations + blink quorum lag) and waiting for nodes to sync"
)
self.sync_nodes(self.mine(40))
self.print_wallet_balances()
self.print_wallet_balances()
else:
vprint("Mining 2 blocks (registrations) and waiting for nodes to sync")
vprint("Sending fake lokinet/ss pings")
for sn in self.sns:
sn.ping()
all_service_nodes_proofed = lambda sn: all(x['quorumnet_port'] > 0 for x in
sn.json_rpc("get_n_service_nodes", {"fields":{"quorumnet_port":True}}).json()['result']['service_node_states'])
if len(self.sns) > 1:
all_service_nodes_proofed = lambda sn: all(
x["quorumnet_port"] > 0
for x in sn.json_rpc(
"get_n_service_nodes", {"fields": {"quorumnet_port": True}}
).json()["result"]["service_node_states"]
)
vprint("Waiting for proofs to propagate: ", end="", flush=True)
for sn in self.sns:
wait_for(lambda: all_service_nodes_proofed(sn), timeout=120)
vprint(".", end="", flush=True, timestamp=False)
vprint(timestamp=False)
vprint("Done.")
vprint("Waiting for proofs to propagate: ", end="", flush=True)
for sn in self.sns:
wait_for(lambda: all_service_nodes_proofed(sn), timeout=120)
vprint(".", end="", flush=True, timestamp=False)
vprint(timestamp=False)
vprint("Done.")
vprint("Fake SN network setup complete!")
def refresh_wallets(self, *, extra=[]):
vprint("Refreshing wallets")
for w in self.wallets + extra:
w.refresh()
vprint("All wallets refreshed")
def mine(self, blocks=None, wallet=None, *, sync=False):
"""Mine some blocks to the given wallet (or self.mike if None) on the wallet's daemon.
Returns the daemon's height after mining the blocks. If blocks is omitted, mines enough to
@ -199,7 +199,6 @@ class SNNetwork:
return height
def sync_nodes(self, height=None, *, extra=[], timeout=10):
"""Waits for all nodes to reach the given height, typically invoked after mine()"""
nodes = self.all_nodes + extra
@ -228,25 +227,24 @@ class SNNetwork:
raise RuntimeError("Timed out waiting for node syncing")
vprint("All nodes synced to height {}".format(height))
def sync(self, extra_nodes=[], extra_wallets=[]):
"""Synchronizes everything: waits for all nodes to sync, then refreshes all wallets. Can be
given external wallets/nodes to sync."""
self.sync_nodes(extra=extra_nodes)
self.refresh_wallets(extra=extra_wallets)
def print_wallet_balances(self):
"""Instructs the wallets to refresh and prints their balances (does nothing in non-verbose mode)"""
global verbose
if not verbose:
if not v.verbose:
return
vprint("Balances:")
for w in self.wallets:
b = w.balances(refresh=True)
vprint(" {:5s}: {:.9f} (total) with {:.9f} (unlocked)".format(
w.name, b[0] * 1e-9, b[1] * 1e-9))
vprint(
" {:5s}: {:.9f} (total) with {:.9f} (unlocked)".format(
w.name, b[0] * 1e-9, b[1] * 1e-9
)
)
def __del__(self):
for n in self.all_nodes:
@ -254,18 +252,19 @@ class SNNetwork:
for w in self.wallets:
w.terminate()
snn = None
@pytest.fixture
def net(pytestconfig, tmp_path, binary_dir):
def sn_net(pytestconfig, tmp_path, binary_dir):
"""Fixture that returns the service node network. It is persistent across tests: the first time
it loads it starts the daemons and wallets, mines a bunch of blocks and submits SN
registrations. On subsequent loads it mines 5 blocks so that mike always has some available
funds, and sets alice and bob to new wallets."""
global snn, verbose
funds, and resets alice and bob to new wallets."""
global snn
if not snn:
verbose = pytestconfig.getoption('verbose') >= 2
if verbose:
v.verbose = pytestconfig.getoption("verbose") >= 2
if v.verbose:
print("\nConstructing initial service node network")
snn = SNNetwork(datadir=tmp_path, binpath=binary_dir)
else:
@ -274,7 +273,7 @@ def net(pytestconfig, tmp_path, binary_dir):
# Flush pools because some tests leave behind impossible txes
for n in snn.all_nodes:
assert n.json_rpc("flush_txpool").json()['result']['status'] == 'OK'
assert n.json_rpc("flush_txpool").json()["result"]["status"] == "OK"
# Mine a few to clear out anything in the mempool that can be cleared
snn.mine(5, sync=True)
@ -285,127 +284,11 @@ def net(pytestconfig, tmp_path, binary_dir):
return snn
# Shortcuts for accessing the named wallets
@pytest.fixture
def alice(net):
return net.alice
@pytest.fixture
def bob(net):
return net.bob
@pytest.fixture
def mike(net):
return net.mike
@pytest.fixture
def chuck(net):
def basic_net(pytestconfig, tmp_path, binary_dir):
"""
`chuck` is the wallet of a potential attacker, with some extra add-ons. The main `chuck` wallet
is connected to one of the three network nodes (like alice or bob), and starts out empty.
Chuck also has a second copy of the same wallet, `chuck.hidden`, which is connected to his own
private node, `chuck.hidden.node`. This node is connected to the network exclusively through a
second node that Chuck runs, `chuck.bridge`. This allows chuck to disconnect from the network
by stopping the bridge node and reconnect by restarting it. Note that the bridge and hidden
nodes will not have received proofs (and so can't be used to submit blinks).
Fixture that returns a network of just one service node (solely for the rewards) and one
regular node. Unlike sn_net, it is not persistent across tests: it starts new daemons and
wallets each time it is constructed.
"""
chuck = Wallet(node=net.nodes[0], name='Chuck', rpc_wallet=net.binpath+'/oxen-wallet-rpc', datadir=net.datadir)
chuck.ready(wallet="chuck")
hidden_node = Daemon(oxend=net.binpath+'/oxend', datadir=net.datadir)
bridge_node = Daemon(oxend=net.binpath+'/oxend', datadir=net.datadir)
for x in (4, 7):
bridge_node.add_peer(net.all_nodes[x])
bridge_node.add_peer(hidden_node)
hidden_node.add_peer(bridge_node)
vprint("Starting new chuck oxend bridge node with RPC on {}:{}".format(bridge_node.listen_ip, bridge_node.rpc_port))
bridge_node.start()
bridge_node.wait_for_json_rpc("get_info")
net.sync(extra_nodes=[bridge_node], extra_wallets=[chuck])
vprint("Starting new chuck oxend hidden node with RPC on {}:{}".format(hidden_node.listen_ip, hidden_node.rpc_port))
hidden_node.start()
hidden_node.wait_for_json_rpc("get_info")
net.sync(extra_nodes=[hidden_node, bridge_node], extra_wallets=[chuck])
vprint("Done syncing chuck nodes")
# RPC wallet doesn't provide a way to import from a key or mnemonic, so we have to stop the rpc
# wallet then copy the underlying wallet file.
chuck.refresh()
chuck.stop()
chuck.hidden = Wallet(node=hidden_node, name='Chuck (hidden)', rpc_wallet=net.binpath+'/oxen-wallet-rpc', datadir=net.datadir)
import shutil
import os
wallet_base = chuck.walletdir + '/chuck'
assert os.path.exists(wallet_base)
assert os.path.exists(wallet_base + '.keys')
os.makedirs(chuck.hidden.walletdir, exist_ok=True)
shutil.copy(wallet_base, chuck.hidden.walletdir + '/chuck2')
shutil.copy(wallet_base + '.keys', chuck.hidden.walletdir + '/chuck2.keys')
# Restart the regular wallet and the newly copied hidden wallet
chuck.ready(wallet="chuck", existing=True)
chuck.hidden.ready(wallet="chuck2", existing=True)
chuck.refresh()
chuck.hidden.refresh()
assert chuck.address() == chuck.hidden.address()
chuck.bridge = bridge_node
return chuck
@pytest.fixture
def chuck_double_spend(net, alice, mike, chuck):
"""
Importing this fixture (along with `chuck` itself!) extends the chuck setup to transfer 100
coins to chuck, mine them to confirmation, then stop his bridge node to double-spend those
funds. This consists of a blink tx of 95 (sent to alice) on the connected network and a
conflicting regular tx (sent to himself) submitted to the mempool of his local hidden (and now
disconnected) node.
The fixture value is a tuple of the submitted tx details as returned by the rpc wallet,
`(blinked_tx, hidden_tx)`.
"""
assert(chuck.balances() == (0, 0))
mike.transfer(chuck, coins(100))
net.mine()
net.sync(extra_nodes=[chuck.bridge, chuck.hidden.node], extra_wallets=[chuck, chuck.hidden])
assert chuck.balances() == coins(100, 100)
assert chuck.hidden.balances() == coins(100, 100)
# Now we disconnect chuck's bridge node, which will isolate the hidden node.
chuck.bridge.stop()
tx_blink = chuck.transfer(alice, coins(95), priority=5)
assert len(tx_blink['tx_hash_list']) == 1
blink_hash = tx_blink['tx_hash_list'][0]
time.sleep(0.5) # allow blink to propagate
# ... but it shouldn't have propagated here because this is disconnected, so we can submit a
# conflicting tx:
tx_hidden = chuck.hidden.transfer(chuck, coins(95), priority=1)
assert len(tx_hidden['tx_hash_list']) == 1
hidden_hash = tx_hidden['tx_hash_list'][0]
assert hidden_hash != blink_hash
vprint("double-spend txs: blink: {}, hidden: {}".format(blink_hash, hidden_hash))
net.sync()
alice.refresh()
assert alice.balances() == coins(95, 0)
mike_txpool = [x['id_hash'] for x in mike.node.rpc("/get_transaction_pool").json()['transactions']]
assert mike_txpool == [blink_hash]
hidden_txpool = [x['id_hash'] for x in chuck.hidden.node.rpc("/get_transaction_pool").json()['transactions']]
assert hidden_txpool == [hidden_hash]
return (tx_blink, tx_hidden)
v.verbose = pytestconfig.getoption("verbose") >= 2
return SNNetwork(datadir=tmp_path, binpath=binary_dir, sns=1, nodes=1, unstaked_sns=1)

View File

@ -0,0 +1,12 @@
from datetime import datetime
verbose = False
def vprint(*args, timestamp=True, **kwargs):
global verbose
if verbose:
if timestamp:
print(datetime.now(), end=" ")
print(*args, **kwargs)
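# Illustrative usage (not part of this commit): callers import the module and the function
# separately so the `verbose` flag can be flipped globally, e.g.
#   import vprint as v
#   from vprint import vprint
#   v.verbose = True
#   vprint("syncing nodes", timestamp=False)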

5
tests/pyproject.toml Normal file
View File

@ -0,0 +1,5 @@
[tool.black]
line-length = 100
skip-magic-trailing-comma = true
target-version = ['py38']
include = '\.py$'