persist should work
This commit is contained in:
parent 1aab0de425
commit 3c3df3776e
13 changed files with 225 additions and 173 deletions
@@ -17,6 +17,10 @@ pip install -r requirements.txt
cd lib/chiavdf/fast_vdf
# Install libgmp, libboost, and libflint, and then run the following
sh install.sh

# Install mongoDB from https://docs.mongodb.com/manual/administration/install-community/
# Then, ensure that the mongod service is running.
mongod --dbpath ./db/
```

### Generate keys
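The new setup steps assume a local mongod is reachable before any of the node services start. A minimal sketch for checking that from Python, using motor (the async MongoDB driver the new Database class imports); the helper name here is illustrative, not part of the repo:

```python
import asyncio

from motor import motor_asyncio


async def check_mongod() -> None:
    # Same connection string the Database base class uses.
    client = motor_asyncio.AsyncIOMotorClient(
        "mongodb://localhost:27017/", serverSelectionTimeoutMS=2000
    )
    # The "ping" admin command fails fast if mongod is not running.
    await client.admin.command("ping")
    print("mongod is up")


asyncio.get_event_loop().run_until_complete(check_mongod())
```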
@@ -1,7 +1,9 @@
asyncssh==2.0.0
atomicwrites==1.3.0
attrs==19.3.0
autoflake==1.3.1
bitstring==3.1.6
black==19.10b0
blspy==0.1.12
cbor2==4.1.2
cffi==1.13.1
@@ -10,6 +12,7 @@ cryptography==2.8
entrypoints==0.3
flake8==3.7.9
importlib-metadata==0.23
isort==4.3.21
Mako==1.1.0
MarkupSafe==1.1.1
mccabe==0.6.1
scripts/stop_all_servers.sh (new executable file, 2 additions)
@@ -0,0 +1,2 @@
ps -e | grep python | awk '{print $1}' | xargs -L1 kill
ps -e | grep "vdf_server" | awk '{print $1}' | xargs -L1 kill
setup.py (6 changes)
@@ -1,8 +1,8 @@
#!/usr/bin/python3
from setuptools import setup

dependencies = ['blspy', 'cbor2', 'pyyaml', 'asyncssh']
dev_dependencies = ['pytest', 'flake8', 'mypy', 'pytest-asyncio']
dependencies = ["blspy", "cbor2", "pyyaml", "asyncssh"]
dev_dependencies = ["pytest", "flake8", "mypy", "isort", "autoflake", "black", "pytest-asyncio"]

setup(
    name='chiablockchain',
@@ -14,6 +14,6 @@ setup(
    python_requires='>=3.7, <4',
    keywords='chia blockchain node',
    install_requires=dependencies + dev_dependencies,
    long_description=open('README.md').read(),
    long_description=open("README.md").read(),
    zip_safe=False,
)
@@ -1,26 +1,39 @@
from abc import ABC
from typing import Optional, Tuple, AsyncGenerator
import asyncio
import motor.motor_asyncio as maio
from src.types.proof_of_space import ProofOfSpace
from abc import ABC
from motor import motor_asyncio
from src.types.header import HeaderData
from src.types.header_block import HeaderBlock
from src.types.body import Body
from typing import AsyncGenerator, Dict, List, Optional, Tuple
from bson.binary import Binary
from bson.codec_options import CodecOptions, TypeRegistry
from src.types.full_block import FullBlock
from src.types.proof_of_space import ProofOfSpace
from src.types.sized_bytes import bytes32
from src.types.trunk_block import TrunkBlock
from src.util.ints import uint32, uint64
from src.db.codecs import codec_options
import subprocess
from src.util.streamable import Streamable


class Database(ABC):
    # All databases must subclass this so that there's one client
    # Ensure mongod service is running
    loop = asyncio.get_event_loop()
    subprocess.run("mongod", stdout=subprocess.PIPE)  # Ensure mongod service is running
    client = maio.AsyncIOMotorClient("mongodb://localhost:27017/", io_loop=loop)
    client = motor_asyncio.AsyncIOMotorClient(
        "mongodb://localhost:27017/", io_loop=loop
    )

    def __init__(self, db_name):
        # self.lock = asyncio.Lock()
        self.db = Database.client.get_database(db_name, codec_options=codec_options)
        self.db = Database.client.get_database(
            db_name,
            codec_options=CodecOptions(
                type_registry=TypeRegistry(
                    fallback_encoder=lambda obj: Binary(bytes(obj))
                    if isinstance(obj, Streamable)
                    else obj
                )
            ),
        )


class FullNodeStore(Database):
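The codec options inlined above are what let Streamable objects be written to MongoDB directly: any value bson cannot encode natively is routed through the fallback encoder and stored as BSON Binary, and values read back come out as raw bytes (hence the FullBlock.from_bytes calls throughout the store). A minimal standalone sketch of that mechanism, with an illustrative stand-in type instead of the repo's Streamable:

```python
import bson
from bson.binary import Binary
from bson.codec_options import CodecOptions, TypeRegistry


class Blob:
    """Stand-in for a Streamable type: anything convertible to bytes."""

    def __init__(self, data: bytes) -> None:
        self.data = data

    def __bytes__(self) -> bytes:
        return self.data


def fallback_encoder(obj):
    # bson calls this for any type it does not know how to encode.
    return Binary(bytes(obj)) if isinstance(obj, Blob) else obj


codec_options = CodecOptions(
    type_registry=TypeRegistry(fallback_encoder=fallback_encoder)
)

encoded = bson.encode({"_id": 0, "block": Blob(b"\x01\x02")}, codec_options=codec_options)
decoded = bson.decode(encoded)
print(decoded["block"])  # b'\x01\x02' -- decoding yields plain bytes, not Blob
```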
@@ -28,24 +41,22 @@ class FullNodeStore(Database):
        super().__init__(db_name)

        # Stored on database
        getc = self.db.get_collection
        self.full_blocks = getc("full_blocks")
        self.potential_heads = getc("potential_heads")
        self.potential_trunks = getc("potential_trunks")
        self.potential_blocks = getc("potential_blocks")
        self.candidate_blocks = getc("candidate_blocks")
        self.unfinished_blocks = getc("unfinished_blocks")
        self.unfinished_blocks_leader = getc("unfinished_blocks_leader")
        self.sync_mode = getc("sync_mode")
        self.full_blocks = self.db.get_collection("full_blocks")
        self.potential_heads = self.db.get_collection("potential_heads")
        self.potential_trunks = self.db.get_collection("potential_trunks")
        self.potential_blocks = self.db.get_collection("potential_blocks")
        self.candidate_blocks = self.db.get_collection("candidate_blocks")
        self.unfinished_blocks = self.db.get_collection("unfinished_blocks")
        self.sync_mode = self.db.get_collection("sync_mode")

        # Stored on memory
        self.potential_blocks_received: Dict[uint32, Event] = {}
        # Stored in memory
        self.unfinished_blocks_leader = None
        self.potential_blocks_received: Dict[uint32, asyncio.Event] = {}
        self.proof_of_time_estimate_ips: uint64 = uint64(3000)

        # Lock
        self.lock = asyncio.Lock()  # external


    async def _clear_database(self):
        await self.full_blocks.drop()
        await self.potential_heads.drop()
@@ -53,7 +64,6 @@ class FullNodeStore(Database):
        await self.potential_blocks.drop()
        await self.candidate_blocks.drop()
        await self.unfinished_blocks.drop()
        await self.unfinished_blocks_leader.drop()
        await self.sync_mode.drop()

    async def save_block(self, block: FullBlock) -> None:
@@ -68,6 +78,7 @@ class FullNodeStore(Database):
        query = await self.full_blocks.find_one({"_id": header_hash})
        if query is not None:
            return FullBlock.from_bytes(query["block"])
        return None

    async def get_blocks(self) -> AsyncGenerator[FullBlock, None]:
        async for query in self.full_blocks.find({}):
@@ -95,13 +106,15 @@ class FullNodeStore(Database):
    async def get_potential_heads_number(self) -> int:
        return await self.potential_heads.count_documents({})

    async def get_potential_heads_tuples(self) -> AsyncGenerator[Tuple[bytes32, FullBlock], None]:
    async def get_potential_heads_tuples(self) -> List[Tuple[bytes32, FullBlock]]:
        ans = []
        async for query in self.potential_heads.find({}):
            if query and "block" in query:
                block = FullBlock.from_bytes(query["block"])
            else:
                block = None
            yield bytes32(query["_id"]), block
            ans.append((bytes32(query["_id"]), block))
        return ans

    async def add_potential_head(
        self, header_hash: bytes32, block: Optional[FullBlock] = None
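get_potential_heads_tuples now collects its results into a list instead of yielding them, so callers can await it once and iterate the result with a plain for loop (the UI's sync view later in this diff does exactly that). A minimal illustration of the difference, independent of the repo's types:

```python
import asyncio
from typing import AsyncGenerator, List


async def heads_gen() -> AsyncGenerator[int, None]:
    for i in range(3):
        yield i  # an async generator must be consumed with "async for"


async def heads_list() -> List[int]:
    # A coroutine returning a list can be awaited once and the result reused.
    return [i async for i in heads_gen()]


async def main() -> None:
    async for h in heads_gen():   # old style
        print("gen:", h)
    for h in await heads_list():  # new style
        print("list:", h)


asyncio.get_event_loop().run_until_complete(main())
```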
@@ -184,86 +197,14 @@ class FullNodeStore(Database):
        query = await self.unfinished_blocks.find_one({"_id": code})
        return FullBlock.from_bytes(query["block"]) if query else None

    async def set_unfinished_block_leader(self, key: Tuple[bytes32, uint64]) -> None:
        await self.unfinished_blocks_leader.find_one_and_update(
            {"_id": 0},
            {"$set": {"_id": 0, "header": key[0], "iters": key[1]}},
            upsert=True,
        )
    def set_unfinished_block_leader(self, key: Tuple[bytes32, uint64]) -> None:
        self.unfinished_blocks_leader = key

    async def get_unfinished_block_leader(self) -> Optional[Tuple[bytes32, uint64]]:
        query = await self.unfinished_blocks_leader.find_one({"_id": 0})
        return (query["header"], query["iters"]) if query else None
    def get_unfinished_block_leader(self) -> Optional[Tuple[bytes32, uint64]]:
        return self.unfinished_blocks_leader

    async def set_proof_of_time_estimate_ips(self, estimate: uint64):
        self.proof_of_time_estimate_ips = estimate

    async def get_proof_of_time_estimate_ips(self) -> uint64:
        return self.proof_of_time_estimate_ips


# TODO: remove below when tested better
if __name__ == "__main__":

    async def tests():
        print("started testing")
        db = FullNodeStore("test3")
        await db._clear_database()

        from src.consensus.constants import constants

        genesis = FullBlock.from_bytes(constants["GENESIS_BLOCK"])

        # Save/get block
        await db.save_block(genesis)
        assert genesis == await db.get_block(genesis.header_hash)

        # Save/get sync
        for sync_mode in (False, True):
            await db.set_sync_mode(sync_mode)
            assert sync_mode == await db.get_sync_mode()

        # clear sync info
        await db.clear_sync_info()
        assert await db.get_potential_heads_number() == 0

        # add/get potential head, get potential heads num
        await db.add_potential_head(genesis.header_hash)
        assert await db.get_potential_heads_number() == 1
        await db.add_potential_head(genesis.header_hash, genesis)
        assert await db.get_potential_heads_number() == 1
        assert genesis == await db.get_potential_head(genesis.header_hash)

        # add/get potential header
        header = genesis.header_block
        await db.add_potential_header(header)
        assert await db.get_potential_header(genesis.height) == header

        # Add potential block
        await db.add_potential_block(genesis)
        assert genesis == await db.get_potential_block(uint32(0))

        # Add/get candidate block
        assert await db.get_candidate_block(0) is None
        partial = (
            genesis.body,
            genesis.header_block.header.data,
            genesis.header_block.proof_of_space,
        )
        await db.add_candidate_block(genesis.header_hash, *partial)
        assert await db.get_candidate_block(genesis.header_hash) == partial

        # Add/get unfinished block
        key = (genesis.header_hash, uint64(1000))
        assert await db.get_unfinished_block(key) is None
        await db.add_unfinished_block(key, genesis)
        assert await db.get_unfinished_block(key) == genesis

        # Set/get unf block leader
        assert await db.get_unfinished_block_leader() is None
        await db.set_unfinished_block_leader(key)
        assert await db.get_unfinished_block_leader() == key

        print("done testing")

    #Database.loop.run_until_complete(tests())
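The removed leader setter relied on Mongo's single-document upsert pattern: find_one_and_update on a fixed _id with upsert=True, so exactly one document carries the state. A minimal motor sketch of that pattern on its own, with illustrative database and collection names:

```python
import asyncio

from motor import motor_asyncio


async def main() -> None:
    client = motor_asyncio.AsyncIOMotorClient("mongodb://localhost:27017/")
    settings = client.get_database("example_db").get_collection("settings")

    # Keep exactly one document (_id=0) and overwrite its fields in place.
    await settings.find_one_and_update(
        {"_id": 0}, {"$set": {"sync_mode": True}}, upsert=True
    )
    doc = await settings.find_one({"_id": 0})
    print(doc["sync_mode"])  # True


asyncio.get_event_loop().run_until_complete(main())
```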
@@ -1,15 +0,0 @@
from bson.binary import Binary
from src.util.streamable import Streamable
from bson.codec_options import TypeRegistry
from bson.codec_options import CodecOptions


def fallback_encoder(obj):
    if isinstance(obj, Streamable):
        return Binary(bytes(obj))
    return obj


codec_options = CodecOptions(
    type_registry=TypeRegistry(fallback_encoder=fallback_encoder)
)
@@ -1,13 +1,9 @@
from typing import Callable, List, Optional
import asyncio
import logging
from typing import Callable, List, Optional

import asyncssh
from prompt_toolkit.layout.dimension import D
from prompt_toolkit.contrib.ssh import PromptToolkitSSHServer
from prompt_toolkit.key_binding.bindings.focus import (
    focus_next,
    focus_previous,
)

from prompt_toolkit import Application
from prompt_toolkit.styles import Style
from prompt_toolkit.layout.containers import VSplit, HSplit, Window
@@ -122,14 +118,14 @@ class FullNodeUI:

    def setup_keybindings(self) -> KeyBindings:
        kb = KeyBindings()
        kb.add('tab')(focus_next)
        kb.add('s-tab')(focus_previous)
        kb.add('down')(focus_next)
        kb.add('up')(focus_previous)
        kb.add('right')(focus_next)
        kb.add('left')(focus_previous)
        kb.add("tab")(focus_next)
        kb.add("s-tab")(focus_previous)
        kb.add("down")(focus_next)
        kb.add("up")(focus_previous)
        kb.add("right")(focus_next)
        kb.add("left")(focus_previous)

        @kb.add('c-c')
        @kb.add("c-c")
        def exit_(event):
            self.close()
        return kb
@@ -148,7 +144,7 @@ class FullNodeUI:
        self.ips_label = TextArea(focusable=False, height=1)
        self.total_iters_label = TextArea(focusable=False, height=2)
        self.con_rows = []
        self.connections_msg = Label(text=f'Connections')
        self.connections_msg = Label(text=f"Connections")
        self.connection_rows_vsplit = Window()
        self.add_connection_msg = Label(text=f'Add a connection ip:port')
        self.add_connection_field = TextArea(height=1, prompt='>>> ', style='class:input-field',
@@ -173,11 +169,11 @@ class FullNodeUI:
        self.challenge_msg = Label(text=f'Block Header')
        self.challenge = TextArea(focusable=False)

        body = HSplit([self.loading_msg, self.server_msg],
                      height=D(), width=D())
        body = HSplit([self.loading_msg, self.server_msg], height=D(), width=D())
        self.content = Frame(title="Chia Full Node", body=body)
        self.layout = Layout(VSplit([self.content], height=D(), width=D()))


    def change_route_handler(self, route):
        def change_route():
            self.prev_route = self.route
@@ -223,7 +219,9 @@ class FullNodeUI:
            added_blocks.append(max_block)
            heads.remove(max_block)
            async with self.store.lock:
                prev: Optional[FullBlock] = await self.store.get_block(max_block.prev_header_hash)
                prev: Optional[FullBlock] = await self.store.get_block(
                    max_block.prev_header_hash
                )
                if prev is not None:
                    heads.append(prev.header_block)
        return added_blocks
@@ -236,7 +234,6 @@ class FullNodeUI:
            labels = [row.children[0].content.text() for row in self.con_rows]
            if con_str not in labels:
                con_label = Label(text=con_str)

                def disconnect():
                    con.close()
                    self.layout.focus(self.quit_button)
@@ -245,7 +242,9 @@ class FullNodeUI:
                row = VSplit([con_label, disconnect_button])
                self.con_rows.append(row)

        new_con_rows = [row for row in self.con_rows if row.children[0].content.text() in con_strs]
        new_con_rows = [
            row for row in self.con_rows if row.children[0].content.text() in con_strs
        ]
        if new_con_rows != self.con_rows:
            self.con_rows = new_con_rows
            if len(self.con_rows) > 0:
@@ -258,13 +257,13 @@ class FullNodeUI:
        else:
            new_con_rows = Window(width=D(), height=0)

        async with self.store.lock():
            if (await self.store.get_sync_mode()):
        async with self.store.lock:
            if await self.store.get_sync_mode():
                max_height = -1
                for _, block in await self.store.get_potential_heads_tuples():
                    if block.height > max_height:
                        max_height = block.height


                if max_height >= 0:
                    self.syncing.text = f"Syncing up to {max_height}"
                else:
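In the sync view above, self.store.lock is now entered directly: the store exposes an asyncio.Lock attribute (created in FullNodeStore.__init__ earlier in this diff), and an asyncio.Lock is itself an async context manager, so it is not called before use. A minimal sketch of the pattern outside the repo's classes:

```python
import asyncio


class Store:
    def __init__(self) -> None:
        # An attribute, not a method: callers enter it with "async with".
        self.lock = asyncio.Lock()


async def main() -> None:
    store = Store()
    async with store.lock:  # acquires the lock, releases it on exit
        print("holding the store lock")


asyncio.get_event_loop().run_until_complete(main())
```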
@@ -274,10 +273,14 @@ class FullNodeUI:
            heads: List[HeaderBlock] = self.blockchain.get_current_tips()
            lca_block: FullBlock = self.blockchain.lca_block
            if lca_block.height > 0:
                difficulty = await self.blockchain.get_next_difficulty(lca_block.prev_header_hash)
                difficulty = await self.blockchain.get_next_difficulty(
                    lca_block.prev_header_hash
                )
                ips = await self.blockchain.get_next_ips(lca_block.prev_header_hash)
            else:
                difficulty = await self.blockchain.get_next_difficulty(lca_block.header_hash)
                difficulty = await self.blockchain.get_next_difficulty(
                    lca_block.header_hash
                )
                ips = await self.blockchain.get_next_ips(lca_block.header_hash)
            total_iters = lca_block.header_block.challenge.total_iters
            latest_blocks: List[HeaderBlock] = await self.get_latest_blocks(heads)
@@ -287,12 +290,17 @@ class FullNodeUI:
                self.latest_blocks_labels[i].text = (
                    f"{b.height}:{b.header_hash}"
                    f" {'LCA' if b.header_hash == lca_block.header_hash else ''}"
                    f" {'HEAD' if b.header_hash in [h.header_hash for h in heads] else ''}")
                self.latest_blocks_labels[i].handler = self.change_route_handler(f"block/{b.header_hash}")
                    f" {'HEAD' if b.header_hash in [h.header_hash for h in heads] else ''}"
                )
                self.latest_blocks_labels[i].handler = self.change_route_handler(
                    f"block/{b.header_hash}"
                )
                new_labels.append(self.latest_blocks_labels[i])

            self.lca_label.text = f"Current least common ancestor {lca_block.header_hash} height {lca_block.height}"
            self.current_heads_label.text = "Heights of heads: " + str([h.height for h in heads])
            self.current_heads_label.text = "Heights of heads: " + str(
                [h.height for h in heads]
            )
            self.difficulty_label.text = f"Current difficuty: {difficulty}"
            self.ips_label.text = f"Current VDF iterations per second: {ips}"
            self.total_iters_label.text = f"Total iterations since genesis: {total_iters}"
@@ -320,7 +328,9 @@ class FullNodeUI:
    async def draw_block(self):
        block_hash: str = self.route.split("block/")[1]
        async with self.store.lock:
            block: Optional[FullBlock] = await self.store.get_block(bytes32(bytes.fromhex(block_hash)))
            block: Optional[FullBlock] = await self.store.get_block(
                bytes32(bytes.fromhex(block_hash))
            )
        if block is not None:
            self.block_msg.text = f"Block {str(block.header_hash)}"
            if self.block_label.text != str(block):
@@ -1,9 +1,13 @@
import time
import pytest
import asyncio
from typing import Dict, Any
import time
from typing import Any, Dict

import pytest
from blspy import PrivateKey

from src.blockchain import Blockchain, ReceiveBlockResult
from src.consensus.constants import constants

from src.types.coinbase import CoinbaseInfo
from src.types.body import Body
from src.types.proof_of_space import ProofOfSpace
@@ -14,8 +18,8 @@ from src.types.header import HeaderData
from src.blockchain import Blockchain, ReceiveBlockResult
from src.db.database import FullNodeStore
from src.util.ints import uint64, uint32
from tests.block_tools import BlockTools

from tests.block_tools import BlockTools

bt = BlockTools()

@@ -27,10 +31,12 @@ test_constants: Dict[str, Any] = {
    "DIFFICULTY_FACTOR": 3,
    "DIFFICULTY_EPOCH": 12,  # The number of blocks per epoch
    "DIFFICULTY_WARP_FACTOR": 4,  # DELAY divides EPOCH in order to warp efficiently.
    "DIFFICULTY_DELAY": 3  # EPOCH / WARP_FACTOR
    "DIFFICULTY_DELAY": 3,  # EPOCH / WARP_FACTOR
}
test_constants["GENESIS_BLOCK"] = bytes(bt.create_genesis_block(test_constants, bytes([0]*32), b'0'))
#test_constants["GENESIS_BLOCK"] = b'\x15N3\xd3\xf9H\xc2K\x96\xfe\xf2f\xa2\xbf\x87\x0e\x0f,\xd0\xd4\x0f6s\xb1".\\\xf5\x8a\xb4\x03\x84\x8e\xf9\xbb\xa1\xca\xdef3:\xe4?\x0c\xe5\xc6\x12\x80\x17\xd2\xcc\xd7\xb4m\x94\xb7V\x959\xed4\x89\x04b\x08\x07^\xca`\x8f#%\xe9\x9c\x9d\x86y\x10\x96W\x9d\xce\xc1\x15r\x97\x91U\n\x11<\xdf\xb2\xfc\xfb<\x13\x00\x00\x00\x98\xf4\x88\xcb\xb2MYo]\xaf \xd8a>\x06\xfe\xc8F\x8d\x15\x90\x15\xbb\x04\xd48\x10\xc6\xd8b\x82\x88\x7fx<\xe5\xe6\x8b\x8f\x84\xdd\x1cU"\x83\xfb7\x9d`\xb0I\xb3\xbe;bvE\xc6\x92\xdd\xbe\x988\xe9y;\xc6.\xa1\xce\x94\xdc\xd8\xab\xaf\xba\x8f\xd8r\x8br\xc8\xa0\xac\xc0\xe9T\x87\x08\x08\x8b#-\xb6o\xf0\x1f\x0bzv\xb3\x81\x1a\xd4\xf7\x01\xdf\xc5A\x11\xe0\x0c\xc0\x87\xa6\xc2v\xbbR\xc4{"\xa5\xe5\xe0bx7\xfa\n\xae\xea\xfe\x02\xac\xef\xec\xd1\xc2\xc55\x06{\xe1\x0c\xb2\x99q\xd7\xd8\xcb\x97\x86\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Y\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x007\x03\x00\x00\x004\x00\x8c\xff\xc3\x00\x00\x00\x00\x00\x00\x01T\x00>\xff\xe3\x00\x80\x00[\x00\x00\x00\x00\x00\x00\x05R\x00\x08\x00\x05\x00j\xff\xfd\x00\x00\x00\x00\x00\x00\x17\xf0\x00j\xff\x99\x00j\x00\x03\x01\x03\xa1\xde8\x0f\xb75VB\xf6"`\x94\xc7\x0b\xaa\x1f\xa2Nv\x8a\xf9\xc9\x9a>\x13\xa3a\xc8\x0c\xcb?\x968\xc7\xeb\xc3\x10a\x1a\xa7\xfb\x85\xa7iu\x14`\x8f\x90\x16o\x97\xd5\t\xa4,\xe5\xed\xe1\x15\x86<\x9d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x1f\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\xbf\xd7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xa1\xde8\x0f\xb75VB\xf6"`\x94\xc7\x0b\xaa\x1f\xa2Nv\x8a\xf9\xc9\x9a>\x13\xa3a\xc8\x0c\xcb?\x13\x16J\xe5\xfc\xa9\x06\xe8A\xe9\xc0Ql\xfb\xaeF\xcd\xd6\xa7\x8ei\xc4\xfa\xd4i\x84\xee\xc9\xe2\xaa\xa4f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00OB!\x81)\xf0l\xbcg\xa3^\xef\x0e\xfc\xb7\x02\x80\xe4\xa9NO\x89\xa0\t\xc3C\xd9\xda\xff\xd7\t\xeebfC&8\x9c+n$\x00\xa4\xe85\x19\xb0\xf6\x18\xa1\xeeR\xae\xec 
\x82k\xe0v@;\x1c\xc14PMh\xfb\xe3\x1c\xbf\x84O\xcd\xbc\xc4\xb8\xeabz`\xf7\x06;\xf6q\x8b,\x18\tf~\xd1\x11l#\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x8b)\xaa\x96x8\xd76J\xa6\x8b[\x98\t\xe0\\\xe3^7qD\x8c\xf5q\x08\xf2\xa2\xc9\xb03mvU\x1a\xe2\x181\x88\xfe\t\x03?\x12\xadj\x9d\xe8K\xb8!\xee\xe7e8\x82\xfb$\xf0Y\xfaJ\x10\x1f\x1a\xe5\xe9\xa8\xbb\xea\x87\xfc\xb12y\x94\x8d,\x16\xe4C\x02\xba\xe6\xac\x94{\xc4c\x07(\xb8\xeb\xab\xe3\xcfy{6\x98\t\xf4\x8fm\xd62\x85\x87\xb0\x03f\x01B]\xe3\xc6\x13l6\x8d\x0e\x18\xc64%\x97\x1a\xa6\xf4\x8b)\xaa\x96x8\xd76J\xa6\x8b[\x98\t\xe0\\\xe3^7qD\x8c\xf5q\x08\xf2\xa2\xc9\xb03mv\x00\x00\x00\x00\x00\x00\x00\x00\x00_\xec\xebf\xff\xc8o8\xd9Rxlmily\xc2\xdb\xc29\xddN\x91\xb4g)\xd7:\'\xfbW\xe9'
test_constants["GENESIS_BLOCK"] = bytes(
    bt.create_genesis_block(test_constants, bytes([0] * 32), b"0")
)
# test_constants["GENESIS_BLOCK"] = b'\x15N3\xd3\xf9H\xc2K\x96\xfe\xf2f\xa2\xbf\x87\x0e\x0f,\xd0\xd4\x0f6s\xb1".\\\xf5\x8a\xb4\x03\x84\x8e\xf9\xbb\xa1\xca\xdef3:\xe4?\x0c\xe5\xc6\x12\x80\x17\xd2\xcc\xd7\xb4m\x94\xb7V\x959\xed4\x89\x04b\x08\x07^\xca`\x8f#%\xe9\x9c\x9d\x86y\x10\x96W\x9d\xce\xc1\x15r\x97\x91U\n\x11<\xdf\xb2\xfc\xfb<\x13\x00\x00\x00\x98\xf4\x88\xcb\xb2MYo]\xaf \xd8a>\x06\xfe\xc8F\x8d\x15\x90\x15\xbb\x04\xd48\x10\xc6\xd8b\x82\x88\x7fx<\xe5\xe6\x8b\x8f\x84\xdd\x1cU"\x83\xfb7\x9d`\xb0I\xb3\xbe;bvE\xc6\x92\xdd\xbe\x988\xe9y;\xc6.\xa1\xce\x94\xdc\xd8\xab\xaf\xba\x8f\xd8r\x8br\xc8\xa0\xac\xc0\xe9T\x87\x08\x08\x8b#-\xb6o\xf0\x1f\x0bzv\xb3\x81\x1a\xd4\xf7\x01\xdf\xc5A\x11\xe0\x0c\xc0\x87\xa6\xc2v\xbbR\xc4{"\xa5\xe5\xe0bx7\xfa\n\xae\xea\xfe\x02\xac\xef\xec\xd1\xc2\xc55\x06{\xe1\x0c\xb2\x99q\xd7\xd8\xcb\x97\x86\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00Y\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x007\x03\x00\x00\x004\x00\x8c\xff\xc3\x00\x00\x00\x00\x00\x00\x01T\x00>\xff\xe3\x00\x80\x00[\x00\x00\x00\x00\x00\x00\x05R\x00\x08\x00\x05\x00j\xff\xfd\x00\x00\x00\x00\x00\x00\x17\xf0\x00j\xff\x99\x00j\x00\x03\x01\x03\xa1\xde8\x0f\xb75VB\xf6"`\x94\xc7\x0b\xaa\x1f\xa2Nv\x8a\xf9\xc9\x9a>\x13\xa3a\xc8\x0c\xcb?\x968\xc7\xeb\xc3\x10a\x1a\xa7\xfb\x85\xa7iu\x14`\x8f\x90\x16o\x97\xd5\t\xa4,\xe5\xed\xe1\x15\x86<\x9d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x1f\xeb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\xbf\xd7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\xa1\xde8\x0f\xb75VB\xf6"`\x94\xc7\x0b\xaa\x1f\xa2Nv\x8a\xf9\xc9\x9a>\x13\xa3a\xc8\x0c\xcb?\x13\x16J\xe5\xfc\xa9\x06\xe8A\xe9\xc0Ql\xfb\xaeF\xcd\xd6\xa7\x8ei\xc4\xfa\xd4i\x84\xee\xc9\xe2\xaa\xa4f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00OB!\x81)\xf0l\xbcg\xa3^\xef\x0e\xfc\xb7\x02\x80\xe4\xa9NO\x89\xa0\t\xc3C\xd9\xda\xff\xd7\t\xeebfC&8\x9c+n$\x00\xa4\xe85\x19\xb0\xf6\x18\xa1\xeeR\xae\xec 
\x82k\xe0v@;\x1c\xc14PMh\xfb\xe3\x1c\xbf\x84O\xcd\xbc\xc4\xb8\xeabz`\xf7\x06;\xf6q\x8b,\x18\tf~\xd1\x11l#\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x8b)\xaa\x96x8\xd76J\xa6\x8b[\x98\t\xe0\\\xe3^7qD\x8c\xf5q\x08\xf2\xa2\xc9\xb03mvU\x1a\xe2\x181\x88\xfe\t\x03?\x12\xadj\x9d\xe8K\xb8!\xee\xe7e8\x82\xfb$\xf0Y\xfaJ\x10\x1f\x1a\xe5\xe9\xa8\xbb\xea\x87\xfc\xb12y\x94\x8d,\x16\xe4C\x02\xba\xe6\xac\x94{\xc4c\x07(\xb8\xeb\xab\xe3\xcfy{6\x98\t\xf4\x8fm\xd62\x85\x87\xb0\x03f\x01B]\xe3\xc6\x13l6\x8d\x0e\x18\xc64%\x97\x1a\xa6\xf4\x8b)\xaa\x96x8\xd76J\xa6\x8b[\x98\t\xe0\\\xe3^7qD\x8c\xf5q\x08\xf2\xa2\xc9\xb03mv\x00\x00\x00\x00\x00\x00\x00\x00\x00_\xec\xebf\xff\xc8o8\xd9Rxlmily\xc2\xdb\xc29\xddN\x91\xb4g)\xd7:\'\xfbW\xe9'


@pytest.fixture(scope="module")
@@ -40,7 +46,7 @@ def event_loop():
    loop.close()


class TestGenesisBlock():
class TestGenesisBlock:
    @pytest.mark.asyncio
    async def test_basic_blockchain(self):
        store = FullNodeStore("fndb_test")
@@ -56,7 +62,7 @@ class TestGenesisBlock():
        assert await bc1.get_next_ips(genesis_block.header_hash) > 0


class TestBlockValidation():
class TestBlockValidation:
    @pytest.fixture(scope="module")
    async def initial_blockchain(self):
        """
@@ -68,7 +74,9 @@ class TestBlockValidation():
        b: Blockchain = Blockchain(store, test_constants)
        await b.initialize()
        for i in range(1, 9):
            assert (await b.receive_block(blocks[i])) == ReceiveBlockResult.ADDED_TO_HEAD
            assert (
                await b.receive_block(blocks[i])
            ) == ReceiveBlockResult.ADDED_TO_HEAD
        return (blocks, b)

    @pytest.mark.asyncio
@@ -89,6 +97,7 @@ class TestBlockValidation():
        ), blocks[9].body)
        assert (await b.receive_block(block_bad)) == ReceiveBlockResult.DISCONNECTED_BLOCK


    @pytest.mark.asyncio
    async def test_timestamp(self, initial_blockchain):
        blocks, b = initial_blockchain
@@ -141,6 +150,7 @@ class TestBlockValidation():
            blocks[9].header_block.header.data.extension_data
        ), blocks[9].header_block.header.harvester_signature)
        ), blocks[9].body)

        assert (await b.receive_block(block_bad)) == ReceiveBlockResult.INVALID_BLOCK

    @pytest.mark.asyncio
@@ -185,9 +195,9 @@ class TestBlockValidation():
        # Coinbase height invalid
        block_bad = FullBlock(blocks[9].header_block, Body(
            CoinbaseInfo(
                uint32(3),
                blocks[9].body.coinbase.amount,
                blocks[9].body.coinbase.puzzle_hash
                uint32(3),
                blocks[9].body.coinbase.amount,
                blocks[9].body.coinbase.puzzle_hash,
            ),
            blocks[9].body.coinbase_signature,
            blocks[9].body.fees_target_info,
@@ -208,7 +218,9 @@ class TestBlockValidation():
        b: Blockchain = Blockchain(store, test_constants)
        await b.initialize()
        for i in range(1, num_blocks):
            assert (await b.receive_block(blocks[i])) == ReceiveBlockResult.ADDED_TO_HEAD
            assert (
                await b.receive_block(blocks[i])
            ) == ReceiveBlockResult.ADDED_TO_HEAD

        diff_25 = await b.get_next_difficulty(blocks[24].header_hash)
        diff_26 = await b.get_next_difficulty(blocks[25].header_hash)
@@ -238,7 +250,9 @@ class TestReorgs():
            await b.receive_block(block)
        assert b.get_current_tips()[0].height == 100

        blocks_reorg_chain = bt.get_consecutive_blocks(test_constants, 30, blocks[:90], 9, b'1')
        blocks_reorg_chain = bt.get_consecutive_blocks(
            test_constants, 30, blocks[:90], 9, b"1"
        )
        for reorg_block in blocks_reorg_chain:
            result = await b.receive_block(reorg_block)
            if reorg_block.height < 90:
@@ -251,7 +265,7 @@ class TestReorgs():

    @pytest.mark.asyncio
    async def test_reorg_from_genesis(self):
        blocks = bt.get_consecutive_blocks(test_constants, 20, [], 9, b'0')
        blocks = bt.get_consecutive_blocks(test_constants, 20, [], 9, b"0")
        store = FullNodeStore("fndb_test")
        await store._clear_database()
        b: Blockchain = Blockchain(store, test_constants)
@@ -261,7 +275,9 @@ class TestReorgs():
        assert b.get_current_tips()[0].height == 20

        # Reorg from genesis
        blocks_reorg_chain = bt.get_consecutive_blocks(test_constants, 21, [blocks[0]], 9, b'1')
        blocks_reorg_chain = bt.get_consecutive_blocks(
            test_constants, 21, [blocks[0]], 9, b"1"
        )
        for reorg_block in blocks_reorg_chain:
            result = await b.receive_block(reorg_block)
            if reorg_block.height == 0:
@@ -273,14 +289,22 @@ class TestReorgs():
        assert b.get_current_tips()[0].height == 21

        # Reorg back to original branch
        blocks_reorg_chain_2 = bt.get_consecutive_blocks(test_constants, 3, blocks, 9, b'3')
        await b.receive_block(blocks_reorg_chain_2[20]) == ReceiveBlockResult.ADDED_AS_ORPHAN
        assert (await b.receive_block(blocks_reorg_chain_2[21])) == ReceiveBlockResult.ADDED_TO_HEAD
        assert (await b.receive_block(blocks_reorg_chain_2[22])) == ReceiveBlockResult.ADDED_TO_HEAD
        blocks_reorg_chain_2 = bt.get_consecutive_blocks(
            test_constants, 3, blocks, 9, b"3"
        )
        await b.receive_block(
            blocks_reorg_chain_2[20]
        ) == ReceiveBlockResult.ADDED_AS_ORPHAN
        assert (
            await b.receive_block(blocks_reorg_chain_2[21])
        ) == ReceiveBlockResult.ADDED_TO_HEAD
        assert (
            await b.receive_block(blocks_reorg_chain_2[22])
        ) == ReceiveBlockResult.ADDED_TO_HEAD

    @pytest.mark.asyncio
    async def test_lca(self):
        blocks = bt.get_consecutive_blocks(test_constants, 5, [], 9, b'0')
        blocks = bt.get_consecutive_blocks(test_constants, 5, [], 9, b"0")
        store = FullNodeStore("fndb_test")
        await store._clear_database()
        b: Blockchain = Blockchain(store, test_constants)
@@ -289,15 +313,15 @@ class TestReorgs():
            await b.receive_block(block)

        assert b.lca_block == blocks[3]
        block_5_2 = bt.get_consecutive_blocks(test_constants, 1, blocks[:5], 9, b'1')[5]
        block_5_3 = bt.get_consecutive_blocks(test_constants, 1, blocks[:5], 9, b'2')[5]
        block_5_2 = bt.get_consecutive_blocks(test_constants, 1, blocks[:5], 9, b"1")[5]
        block_5_3 = bt.get_consecutive_blocks(test_constants, 1, blocks[:5], 9, b"2")[5]

        await b.receive_block(block_5_2)
        assert b.lca_block == blocks[4]
        await b.receive_block(block_5_3)
        assert b.lca_block == blocks[4]

        reorg = bt.get_consecutive_blocks(test_constants, 6, [], 9, b'3')
        reorg = bt.get_consecutive_blocks(test_constants, 6, [], 9, b"3")
        for block in reorg:
            await b.receive_block(block)
        assert b.lca_block == blocks[0]
tests/test_database.py (new file, 83 additions)
@@ -0,0 +1,83 @@
import asyncio
from typing import Any, Dict

import pytest
from bson.binary import Binary
from bson.codec_options import CodecOptions, TypeRegistry
from motor import motor_asyncio

from src.consensus.constants import constants
from src.database import FullNodeStore
from src.types.block_body import BlockBody
from src.types.block_header import BlockHeaderData
from src.types.full_block import FullBlock
from src.types.proof_of_space import ProofOfSpace
from src.types.sized_bytes import bytes32
from src.types.trunk_block import TrunkBlock
from src.util.ints import uint32, uint64
from src.util.streamable import Streamable


@pytest.fixture(scope="module")
def event_loop():
    loop = asyncio.get_event_loop()
    yield loop
    loop.close()


class TestDatabase:
    @pytest.mark.asyncio
    async def test_basic_database(self):
        db = FullNodeStore("fndb_test")
        await db._clear_database()
        genesis = FullBlock.from_bytes(constants["GENESIS_BLOCK"])

        # Save/get block
        await db.save_block(genesis)
        assert genesis == await db.get_block(genesis.header_hash)

        # Save/get sync
        for sync_mode in (False, True):
            await db.set_sync_mode(sync_mode)
            assert sync_mode == await db.get_sync_mode()

        # clear sync info
        await db.clear_sync_info()
        assert await db.get_potential_heads_number() == 0

        # add/get potential head, get potential heads num
        await db.add_potential_head(genesis.header_hash)
        assert await db.get_potential_heads_number() == 1
        await db.add_potential_head(genesis.header_hash, genesis)
        assert await db.get_potential_heads_number() == 1
        assert genesis == await db.get_potential_head(genesis.header_hash)

        # add/get potential trunk
        trunk = genesis.trunk_block
        await db.add_potential_trunk(trunk)
        assert await db.get_potential_trunk(genesis.height) == trunk

        # Add potential block
        await db.add_potential_block(genesis)
        assert genesis == await db.get_potential_block(0)

        # Add/get candidate block
        assert await db.get_candidate_block(0) is None
        partial = (
            genesis.body,
            genesis.trunk_block.header.data,
            genesis.trunk_block.proof_of_space,
        )
        await db.add_candidate_block(genesis.header_hash, *partial)
        assert await db.get_candidate_block(genesis.header_hash) == partial

        # Add/get unfinished block
        key = (genesis.header_hash, 1000)
        assert await db.get_unfinished_block(key) is None
        await db.add_unfinished_block(key, genesis)
        assert await db.get_unfinished_block(key) == genesis

        # Set/get unf block leader
        assert db.get_unfinished_block_leader() is None
        db.set_unfinished_block_leader(key)
        assert db.get_unfinished_block_leader() == key