Documentation generation using docsify

- Add a script to extract markdown from the RPC comments to build a docs
  page suitable for docsify.
- Split up the daemon RPC docs logically into sections
- Reformat (mostly rewrapping) various docs entries
- Add examples, many of which are live-fetched at documentation
  generation time.
- Mark various required options as `required{...}` so that they produce
  an error if not provided in a request.
Jason Rhinelander 2021-12-10 21:16:27 -04:00
parent 6122164735
commit 965e4f05eb
No known key found for this signature in database
GPG Key ID: C4992CE7A88D4262
16 changed files with 1925 additions and 899 deletions

View File

@ -0,0 +1 @@
../../src/rpc/core_rpc_server_commands_defs.h

docs/daemon-rpc/make-docs.sh Executable file
View File

@ -0,0 +1,58 @@
#!/bin/bash

set -e

if [ "$(basename $(pwd))" != "daemon-rpc" ]; then
    echo "Error: you must run this from the docs/daemon-rpc directory" >&2
    exit 1
fi

rm -rf api
docsify init --local api
rm -f api/README.md

if [ -n "$NPM_PACKAGES" ]; then
    npm_dir="$NPM_PACKAGES/lib/node_modules"
elif [ -n "$NODE_PATH" ]; then
    npm_dir="$NODE_PATH"
elif [ -d "$HOME/node_modules" ]; then
    npm_dir="$HOME/node_modules"
elif [ -d "/usr/local/lib/node_modules" ]; then
    npm_dir="/usr/local/lib/node_modules"
else
    echo "Can't determine your node_modules path; set NPM_PACKAGES or NODE_PATH appropriately" >&2
    exit 1
fi

cp "$npm_dir"/docsify/node_modules/prismjs/components/prism-{json,python}.min.js api/vendor

./rpc-to-markdown.py core_rpc_server_commands_defs.h "$@"

perl -ni.bak -e '
BEGIN { $first = 0; }
if (m{^\s*<script>\s*$} .. m{^\s*</script>\s*$}) {
    if (not $first) {
        $first = 1;
        print qq{
<script>
window.\$docsify = {
  name: "Oxen Daemon RPC",
  repo: "https://github.com/oxen-io/oxen-core",
  loadSidebar: "sidebar.md",
  subMaxLevel: 2,
  homepage: "index.md",
}
</script>\n};
    }
} else {
    s{<title>.*</title>}{<title>Oxen Daemon RPC</title>};
    s{(name="description" content=)"[^"]*"}{$1"Oxen Daemon RPC endpoint documentation"};
    if (m{^\s*</body>}) {
        print qq{
<script src="vendor/prism-json.min.js"></script>
<script src="vendor/prism-python.min.js"></script>\n};
    }
    print;
}' api/index.html

View File

@ -0,0 +1,376 @@
#!/usr/bin/env python3
import sys
import os
import shutil
import re
import fileinput
from enum import Enum, auto
import json
import requests
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
    "-L",
    "--markdown-level",
    type=int,
    choices=[1, 2, 3, 4],
    default=2,
    help="Specify a heading level for the top-level endpoints; the default is 2, which means "
    "endpoints start in a `## name` section. For example, 3 would start endpoints with `### name` "
    "instead.",
)
parser.add_argument("--out", "-o", metavar='DIR', default="api", help="Output directory for generated endpoints")
parser.add_argument("--disable-public", action='store_true', help="disable PUBLIC endpoint detection (and disable marking endpoints as requiring admin)")
parser.add_argument("--disable-no-args", action='store_true', help="disable NO_ARGS enforcement of `Inputs: none`")
parser.add_argument("--dev", action='store_true', help="generate dev mode docs, which include endpoints marked 'Dev-RPC'")
parser.add_argument("--no-sort", "-S", action='store_true', help="disable sorting endpoints by name (use file order)")
parser.add_argument("--no-group", "-G", action='store_true', help="disable grouping endpoints by category")
parser.add_argument("--no-emdash", "-M", action='store_true', help="disable converting ' -- ' to '' (em-dashes)")
parser.add_argument("--rpc", metavar='URL', default="http://public-na.optf.ngo:22023", help="URL to a running oxend RPC node for live example fetching")
parser.add_argument("filename", nargs="+")
args = parser.parse_args()
for f in args.filename:
    if not os.path.exists(f):
        parser.error(f"{f} does not exist!")
# We parse the file looking for `///` comment blocks beginning with "RPC: <cat>/<name>".
#
# <name> is the RPC endpoint name to use in the documentation (alternative names can be specified
# using "Old names:"; see below).
#
# <cat> is the category for grouping endpoints together.
#
# Following comment lines are then a Markdown long description, until we find one or more of:
#
# "Inputs: none."
# "Outputs: none."
# "Inputs:" followed by markdown (typically an unordered list) until the next match from this list.
# "Outputs:" followed by markdown
# "Example input:" followed by a code block (i.e. containing json)
# "Example output:" followed by a code block (i.e. json output)
# "Example-JSON-Fetch" goes and fetches the endpoint (live) with the previous example input as the
# "params" value (or no params if "Inputs: none").
# "Old names: a, b, c"
#
# subject to the following rules:
# - each section must have exactly one Input; if the type inherits NO_ARGS then it *must* be an
# "Inputs: none".
# - each section must have exactly one Output
# - "Example input:" section must be immediately followed by an "Example output"
# - "Example output:" sections are permitted without a preceding example input only if the endpoint
# takes no inputs.
# - 0 or more example pairs are permitted.
# - Old names is permitted only once, if it occurs at all; the given names will be indicated as
# deprecated, old names for the endpoint.
#
# Immediately following the command we expect to find a not-only-comment line (e.g. `struct
# <whatever>`) and apply some checks to this:
# - if the line does *not* contain the word `PUBLIC` then we mark the endpoint as requiring admin
# access in its description.
# - if the line contains the word `NO_ARGS` then we double-check that "Inputs: none" was also given
# and error if a more complex Inputs: section was written.
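#
# For illustration only, a doc comment block of the shape this script expects looks roughly like
# the following (a made-up endpoint used purely to show the format; it is not part of the real
# oxen-core sources):
#
#   /// RPC: blockchain/example_value
#   ///
#   /// Returns a single example value.  This description text is ordinary Markdown.
#   ///
#   /// Inputs: none.
#   ///
#   /// Outputs:
#   ///
#   /// - `value` -- the example value.
#   ///
#   /// Example output:
#   ///
#   /// ```json
#   /// { "value": 42 }
#   /// ```
#   struct EXAMPLE_VALUE : PUBLIC, NO_ARGS { /* ... */ };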
hdr = '#' * args.markdown_level
MD_INPUT_HEADER = f"{hdr}# Parameters"
MD_OUTPUT_HEADER = f"{hdr}# Returns"
MD_EXAMPLES_HEADER = f"{hdr}# Examples"
MD_EXAMPLE_IN_HDR = f"{hdr}## Input"
MD_EXAMPLE_OUT_HDR = f"{hdr}## Output"
MD_EX_SINGLE_IN_HDR = f"{hdr}# Example Input"
MD_EX_SINGLE_OUT_HDR = f"{hdr}# Example Output"
MD_NO_INPUT = "This endpoint takes no inputs. _(An optional empty dict/object may be provided, but is not required.)_"
MD_ADMIN = "\n\n> _This endpoint requires admin RPC access; it is not available on public RPC servers._"
RPC_COMMENT = re.compile(r"^\s*/// ?")
RPC_START = re.compile(r"^RPC:\s*([\w/]+)(.*)$")
DEV_RPC_START = re.compile(r"^Dev-RPC:\s*([\w/]+)(.*)$")
IN_NONE = re.compile(r"^Inputs?: *[nN]one\.?$")
IN_SOME = re.compile(r"^Inputs?:\s*$")
OUT_SOME = re.compile(r"^Outputs?:\s*$")
EXAMPLE_IN = re.compile(r"^Example [iI]nputs?:\s*$")
EXAMPLE_OUT = re.compile(r"^Example [oO]utputs?:\s*$")
EXAMPLE_JSON_FETCH = re.compile(r"^Example-JSON-Fetch\s*$")
OLD_NAMES = re.compile(r"[Oo]ld [nN]ames?:")
PLAIN_NAME = re.compile(r"\w+")
PUBLIC = re.compile(r"\bPUBLIC\b")
NO_ARGS = re.compile(r"\bNO_ARGS\b")
input = fileinput.input(args.filename)
rpc_name = None
def error(msg):
    print(
        f"\x1b[31;1mERROR\x1b[0m[{input.filename()}:{input.filelineno()}] "
        f"while parsing endpoint {rpc_name}:",
        file=sys.stderr,
    )
    if msg and isinstance(msg, list):
        for m in msg:
            print(f" - {m}", file=sys.stderr)
    else:
        print(f" {msg}", file=sys.stderr)
    sys.exit(1)


class Parsing(Enum):
    DESC = auto()
    INPUTS = auto()
    OUTPUTS = auto()
    EX_IN = auto()
    EX_OUT = auto()
    NONE = auto()
cur_file = None
found_some = True
endpoints = {}
while True:
    line = input.readline()
    if not line:
        break
    if cur_file is None or cur_file != input.filename():
        if not found_some:
            error(f"Found no parseable endpoint descriptions in {cur_file}")
        cur_file = input.filename()
        found_some = False
    line, removed_comment = re.subn(RPC_COMMENT, "", line, count=1)
    if not removed_comment:
        continue
    m = re.search(RPC_START, line)
    if not m and args.dev:
        m = re.search(DEV_RPC_START, line)
    if not m:
        continue
    if m and m[2]:
        error(f"found trailing garbage after 'RPC: {m[1]}': {m[2]}")
    if m[1].count('/') != 1:
        error(f"Found invalid RPC name: expected 'cat/name', not '{m[1]}'")
    cat, rpc_name = m[1].split('/')
    if args.no_group:
        cat = ''
    description, inputs, outputs = "", "", ""
    done_desc = False
    no_inputs = False
    examples = []
    cur_ex_in = None
    old_names = []
    mode = Parsing.DESC
    while True:
        line = input.readline()
        line, removed_comment = re.subn(RPC_COMMENT, "", line, count=1)
        if not removed_comment:
            break
        if re.search(IN_NONE, line):
            if inputs:
                error("found multiple Inputs:")
            inputs, no_inputs, mode = MD_NO_INPUT, True, Parsing.NONE
        elif re.search(IN_SOME, line):
            if inputs:
                error("found multiple Inputs:")
            mode = Parsing.INPUTS
        elif re.search(OUT_SOME, line):
            if outputs:
                error("found multiple Outputs:")
            mode = Parsing.OUTPUTS
        elif re.search(EXAMPLE_IN, line):
            if cur_ex_in is not None:
                error("found multiple input examples without paired output examples")
            cur_ex_in = ""
            mode = Parsing.EX_IN
        elif re.search(EXAMPLE_OUT, line):
            if not cur_ex_in and not no_inputs:
                error(
                    "found output example without preceding input example (or 'Inputs: none.')"
                )
            examples.append([cur_ex_in, ""])
            cur_ex_in = None
            mode = Parsing.EX_OUT
        elif re.search(EXAMPLE_JSON_FETCH, line):
            if not cur_ex_in and not no_inputs:
                error(
                    "found output example fetch instruction without preceding input (or 'Inputs: none.')"
                )
            params = None
            if cur_ex_in:
                params = cur_ex_in.strip()
                if not params.startswith("```json\n"):
                    error("current example input is not tagged as json for Example-JSON-Fetch")
                params = params[8:]
                if not params.endswith("\n```"):
                    error("current example input doesn't look right (expected trailing ```)")
                params = params[:-4]
                try:
                    params = json.loads(params)
                except Exception as e:
                    error("failed to parse json example input as json")
            result = requests.post(args.rpc + "/json_rpc", json={"jsonrpc": "2.0", "id": "0", "method": rpc_name, "params": params}).json()
            if 'error' in result:
                error(f"JSON fetched example returned an error: {result['error']}")
            elif 'result' not in result:
                error(f"JSON fetched example doesn't contain a \"result\" key: {result}")
            ex_out = json.dumps(result["result"], indent=2, sort_keys=True)
            examples.append([cur_ex_in, f"\n```json\n{ex_out}\n```\n"])
            cur_ex_in = None
            mode = Parsing.NONE
        elif re.search(OLD_NAMES, line):
            old_names = [x.strip() for x in line.split(':', 1)[1].split(',')]
            if not old_names or not all(re.fullmatch(PLAIN_NAME, n) for n in old_names):
                error(f"found unparseable old names line: {line}")
        elif mode == Parsing.NONE:
            if line and not line.isspace():
                error(f"Found unexpected content while looking for a tag: '{line}'")
        elif mode == Parsing.DESC:
            description += line
        elif mode == Parsing.INPUTS:
            inputs += line
        elif mode == Parsing.OUTPUTS:
            outputs += line
        elif mode == Parsing.EX_IN:
            cur_ex_in += line
        elif mode == Parsing.EX_OUT:
            examples[-1][1] += line
    problems = []
    # We hit the end of the commented section
    if not description or description.isspace():
        problems.append("endpoint has no description")
    if not inputs or inputs.isspace():
        problems.append(
            "endpoint has no inputs description; perhaps you need to add 'Inputs: none.'?"
        )
    if not outputs or outputs.isspace():
        problems.append("endpoint has no outputs description")
    if cur_ex_in is not None:
        problems.append(
            "endpoint has a trailing example input without a following example output"
        )
    if not no_inputs and any(not x[0] or x[0].isspace() for x in examples):
        problems.append("found one or more blank input examples")
    if any(not x[1] or x[1].isspace() for x in examples):
        problems.append("found one or more blank output examples")
    public = args.disable_public or re.search(PUBLIC, line)
    if not public:
        description += MD_ADMIN
    if old_names:
        s = 's' if len(old_names) > 1 else ''
        description += f"\n\n> _For backwards compatibility this endpoint is also accessible via the following deprecated endpoint name{s}:_"
        for n in old_names:
            description += f"\n> - _`{n}`_"
    if not args.disable_no_args:
        if re.search(NO_ARGS, line) and not no_inputs:
            problems.append("found NO_ARGS, but 'Inputs: none' was not specified in description")
    if problems:
        error(problems)
    md = f"""
{hdr} `{rpc_name}`
{description}
{MD_INPUT_HEADER}
{inputs}
{MD_OUTPUT_HEADER}
{outputs}
"""
    if examples:
        if len(examples) > 1:
            md += f"\n\n{MD_EXAMPLES_HEADER}\n\n"
            for ex in examples:
                if ex[0] is not None:
                    md += f"""
{MD_EXAMPLE_IN_HDR}
{ex[0]}
"""
                md += f"""
{MD_EXAMPLE_OUT_HDR}
{ex[1]}
"""
        else:
            if examples[0][0] is not None:
                md += f"\n\n{MD_EX_SINGLE_IN_HDR}\n\n{examples[0][0]}"
            md += f"\n\n{MD_EX_SINGLE_OUT_HDR}\n\n{examples[0][1]}"
    if not args.no_emdash:
        md = md.replace(" -- ", "—")
    if cat in endpoints:
        endpoints[cat].append((rpc_name, md))
    else:
        endpoints[cat] = [(rpc_name, md)]
if not endpoints:
    error(f"Found no parseable endpoint descriptions in {cur_file}")
if not args.no_sort:
    for v in endpoints.values():
        v.sort(key=lambda x: x[0])
os.makedirs(args.out, exist_ok=True)
static_path = os.path.dirname(os.path.realpath(__file__)) + '/static'
for f in ('index.md', 'sidebar.md'):
    shutil.copy(f"{static_path}/{f}", f"{args.out}/{f}")
    print(f"Copied static/{f} => {args.out}/{f}")
preamble_prefix = static_path + '/preamble-'
for cat, eps in endpoints.items():
    out = f"{args.out}/{cat}.md"
    with open(out, "w") as f:
        preamble = f"{preamble_prefix}{cat}.md"
        if os.path.isfile(preamble):
            with open(preamble, "r") as fp:
                for line in fp:
                    f.write(line)
            f.write("\n\n")
        else:
            print(f"Warning: {preamble} doesn't exist, writing generic preamble for {cat}", file=sys.stderr)
            f.write(f"# {cat} endpoints\n\n")
        for _, md in eps:
            f.write(md)
    print(f"Wrote {out}")

View File

@ -0,0 +1,62 @@
# Oxen Daemon RPC Endpoints
These pages describe the RPC endpoints available from a running `oxend` node. These endpoints are
used for querying blockchain data, submitting transactions, obtaining service node information,
and controlling the running oxend.
Many of the endpoints described here are publicly accessible; those that are not are marked
accordingly and can only be used by a local administrator.
## HTTP JSON access
Endpoints can be accessed over HTTP by making a JSON RPC request. For example, to call the
`get_info` endpoint on a service node with an HTTP RPC listener on `localhost:22023` (the
default), you would make a POST request to `http://localhost:22023/json_rpc` with a JSON body:
```json
{
"jsonrpc": "2.0",
"id": "0",
"method": "get_info",
"params": { "foo": 123 }
}
```
The pages here describe only the contents of the inner `"params"` value; the outer boilerplate is
the same for all requests. For methods that do not require any input parameters the `"params"`
field may be omitted entirely.
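For use from a script or program, the following minimal Python sketch makes the same kind of
request using the `requests` library (it assumes an oxend with HTTP RPC listening on the default
`localhost:22023`; the printed fields are illustrative only):

```python
import requests

# JSON RPC call to a local oxend (assumes the default HTTP RPC port, 22023):
response = requests.post(
    "http://localhost:22023/json_rpc",
    json={"jsonrpc": "2.0", "id": "0", "method": "get_info"},  # no "params" needed for get_info
).json()

info = response["result"]
print("status:", info["status"], "- height:", info["height"])
```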
### Command-line usage
For example, to make a request using `curl` to the public RPC node `public-na.optf.ngo`, using the
command-line with `jq` to "prettify" the json response:
```
curl -sSX POST http://public-na.optf.ngo:22023/json_rpc \
-d '{"jsonrpc":"2.0","id":"0","method":"get_info"}' | jq .
```
## OxenMQ RPC access
All oxend endpoints are also available via OxenMQ at either the `rpc.ENDPOINT` or `admin.ENDPOINT`
name (the latter if the endpoint is marked admin-only), with an optional additional data part
containing a JSON or bencoded request.
### Command-line usage
The oxen-core source code contains a script (`utils/lmq-rpc.py`) that can invoke such a request:
```
./utils/lmq-rpc.py ipc://$HOME/.oxen/oxend.sock rpc.get_info | jq .
```
to query a local oxend, or:
```
./utils/lmq-rpc.py tcp://public-na.optf.ngo:22027 02ae9aa1bdface3ce32488874d16671b04d44f611d1076033c92f3379f221161 rpc.get_info | jq .
```
or
```
./utils/lmq-rpc.py tcp://public-na.optf.ngo:22029 rpc.get_info '{}' | jq .
```
to query a public node. (The first version uses an encrypted public connection given the remote
oxend's X25519 pubkey; the second version uses an unencrypted public connection).

View File

@ -0,0 +1,4 @@
# Blockchain Endpoints
These endpoints are used for querying various data from the blockchain; unless otherwise indicated,
these are accessible using a public RPC node.

View File

@ -0,0 +1,4 @@
# Daemon Administration Endpoints
These endpoints allow controlling the running oxend and querying privileged information about it.
They require administrator access.

View File

@ -0,0 +1,4 @@
# Network Information Endpoints
These endpoints are used for querying general information about the network that is not
(directly) blockchain data.

View File

@ -0,0 +1,3 @@
# Oxen Name System Query Endpoints
These endpoints allow querying Oxen Name System records.

View File

@ -0,0 +1,4 @@
# Service Node Administration Endpoints
These endpoints allow administering and controlling an active service node. They generally require
that the queried oxend is running as a service node, and they require administrator access.

View File

@ -0,0 +1,5 @@
- [Network information](network.md)
- [Blockchain data](blockchain.md)
- [Oxen Name System](ons.md)
- [Daemon administration](daemon.md)
- [Service Node administration](service_node.md)

View File

@ -1345,6 +1345,7 @@ namespace cryptonote::rpc {
h, curr_height - 1)};
get.response_hex["{}"_format(h)] = m_core.get_block_id_by_height(h);
}
get.response["height"] = curr_height;
get.response["status"] = STATUS_OK;
}
//------------------------------------------------------------------------------------------------------------------------------
@ -1625,10 +1626,6 @@ namespace cryptonote::rpc {
if (i->second > now) {
ban b;
b.host = i->first;
b.ip = 0;
uint32_t ip;
if (epee::string_tools::get_ip_int32_from_string(ip, b.host))
b.ip = ip;
b.seconds = i->second - now;
get_bans.response["bans"].push_back(b);
}
@ -1639,7 +1636,6 @@ namespace cryptonote::rpc {
if (i->second > now) {
ban b;
b.host = i->first.host_str();
b.ip = 0;
b.seconds = i->second - now;
get_bans.response["bans"].push_back(b);
}
@ -1675,32 +1671,22 @@ namespace cryptonote::rpc {
{
epee::net_utils::network_address na;
// try subnet first
if (!set_bans.request.host.empty())
auto ns_parsed = net::get_ipv4_subnet_address(set_bans.request.host);
if (ns_parsed)
{
auto ns_parsed = net::get_ipv4_subnet_address(set_bans.request.host);
if (ns_parsed)
{
if (set_bans.request.ban)
m_p2p.block_subnet(*ns_parsed, set_bans.request.seconds);
else
m_p2p.unblock_subnet(*ns_parsed);
set_bans.response["status"] = STATUS_OK;
return;
}
if (set_bans.request.ban)
m_p2p.block_subnet(*ns_parsed, set_bans.request.seconds);
else
m_p2p.unblock_subnet(*ns_parsed);
set_bans.response["status"] = STATUS_OK;
return;
}
// then host
if (!set_bans.request.host.empty())
{
auto na_parsed = net::get_network_address(set_bans.request.host, 0);
if (!na_parsed)
throw rpc_error{ERROR_WRONG_PARAM, "Unsupported host/subnet type"};
na = std::move(*na_parsed);
}
else
{
na = epee::net_utils::ipv4_network_address{set_bans.request.ip, 0};
}
auto na_parsed = net::get_network_address(set_bans.request.host, 0);
if (!na_parsed)
throw rpc_error{ERROR_WRONG_PARAM, "Unsupported host/subnet type"};
na = std::move(*na_parsed);
if (set_bans.request.ban)
m_p2p.block_host(na, set_bans.request.seconds);
else

View File

@ -2,6 +2,10 @@
namespace cryptonote::rpc {
KV_SERIALIZE_MAP_CODE_BEGIN(EMPTY)
KV_SERIALIZE_MAP_CODE_END()
void to_json(nlohmann::json& j, const GET_BLOCKS_BIN::tx_output_indices& toi)
{
j = nlohmann::json{{"indices", toi.indices}};

View File

@ -35,6 +35,8 @@
namespace cryptonote::rpc {
struct EMPTY { KV_MAP_SERIALIZABLE };
/// Specifies that the RPC call is legacy, deprecated Monero custom binary input/output. If not
/// given then the command is JSON/bt-encoded values. For HTTP RPC this also means the command is
/// *not* available via the HTTP JSON RPC.

View File

@ -211,17 +211,17 @@ namespace cryptonote::rpc {
void parse_request(SET_LOG_LEVEL& set_log_level, rpc_input in) {
get_values(in,
"level", set_log_level.request.level);
"level", required{set_log_level.request.level});
}
void parse_request(SET_LOG_CATEGORIES& set_log_categories, rpc_input in) {
get_values(in,
"categories", set_log_categories.request.categories);
"categories", required{set_log_categories.request.categories});
}
void parse_request(BANNED& banned, rpc_input in) {
get_values(in,
"address", banned.request.address);
"address", required{banned.request.address});
}
void parse_request(FLUSH_TRANSACTION_POOL& flush_transaction_pool, rpc_input in) {
@ -254,23 +254,23 @@ namespace cryptonote::rpc {
void parse_request(POP_BLOCKS& pop_blocks, rpc_input in){
get_values(in,
"nblocks", pop_blocks.request.nblocks);
"nblocks", required{pop_blocks.request.nblocks});
}
void parse_request(LOKINET_PING& lokinet_ping, rpc_input in){
get_values(in,
"ed25519_pubkey", lokinet_ping.request.ed25519_pubkey,
"error", lokinet_ping.request.error,
"version", lokinet_ping.request.version);
"version", required{lokinet_ping.request.version});
}
void parse_request(STORAGE_SERVER_PING& storage_server_ping, rpc_input in){
get_values(in,
"ed25519_pubkey", storage_server_ping.request.ed25519_pubkey,
"ed25519_pubkey", required{storage_server_ping.request.ed25519_pubkey},
"error", storage_server_ping.request.error,
"https_port", storage_server_ping.request.https_port,
"omq_port", storage_server_ping.request.omq_port,
"version", storage_server_ping.request.version);
"https_port", required{storage_server_ping.request.https_port},
"omq_port", required{storage_server_ping.request.omq_port},
"version", required{storage_server_ping.request.version});
}
void parse_request(PRUNE_BLOCKCHAIN& prune_blockchain, rpc_input in){
@ -313,10 +313,9 @@ namespace cryptonote::rpc {
void parse_request(SET_BANS& set_bans, rpc_input in) {
get_values(in,
"ban", set_bans.request.ban,
"host", set_bans.request.host,
"ip", set_bans.request.ip,
"seconds", set_bans.request.seconds);
"ban", required{set_bans.request.ban},
"host", required{set_bans.request.host},
"seconds", required{set_bans.request.seconds});
}
void parse_request(GET_STAKING_REQUIREMENT& get_staking_requirement, rpc_input in) {
@ -362,7 +361,7 @@ namespace cryptonote::rpc {
void parse_request(ONS_OWNERS_TO_NAMES& ons_owners_to_names, rpc_input in) {
get_values(in,
"entries", ons_owners_to_names.request.entries,
"entries", required{ons_owners_to_names.request.entries},
"include_expired", ons_owners_to_names.request.include_expired);
}
@ -391,8 +390,8 @@ namespace cryptonote::rpc {
void parse_request(GET_SERVICE_NODE_REGISTRATION_CMD_RAW& cmd, rpc_input in) {
get_values(in,
"args", cmd.request.args,
"args", required{cmd.request.args},
"make_friendly", cmd.request.make_friendly,
"staking_requirement", cmd.request.staking_requirement);
"staking_requirement", required{cmd.request.staking_requirement});
}
}

View File

@ -182,34 +182,6 @@ void to_json(nlohmann::json& j, const ONS_OWNERS_TO_NAMES::response_entry& r)
};
}
KV_SERIALIZE_MAP_CODE_BEGIN(EMPTY)
KV_SERIALIZE_MAP_CODE_END()
KV_SERIALIZE_MAP_CODE_BEGIN(block_header_response)
KV_SERIALIZE(major_version)
KV_SERIALIZE(minor_version)
KV_SERIALIZE(timestamp)
KV_SERIALIZE(prev_hash)
KV_SERIALIZE(nonce)
KV_SERIALIZE(orphan_status)
KV_SERIALIZE(height)
KV_SERIALIZE(depth)
KV_SERIALIZE(hash)
KV_SERIALIZE(difficulty)
KV_SERIALIZE(cumulative_difficulty)
KV_SERIALIZE(reward)
KV_SERIALIZE(coinbase_payouts)
KV_SERIALIZE(block_size)
KV_SERIALIZE_OPT(block_weight, (uint64_t)0)
KV_SERIALIZE(num_txes)
KV_SERIALIZE(pow_hash)
KV_SERIALIZE_OPT(long_term_weight, (uint64_t)0)
KV_SERIALIZE(miner_tx_hash)
KV_SERIALIZE(tx_hashes)
KV_SERIALIZE(service_node_winner)
KV_SERIALIZE_MAP_CODE_END()
KV_SERIALIZE_MAP_CODE_BEGIN(GET_OUTPUT_DISTRIBUTION::request)
KV_SERIALIZE(amounts)

File diff suppressed because it is too large