Compare commits

...

70 Commits

Author SHA1 Message Date
Florin Chirica cf1293e41e
mypy. 2023-06-29 16:46:14 +02:00
Florin Chirica 7ff6a516b4
mypy. 2023-06-29 16:39:56 +02:00
Florin Chirica 163fe45b8a
Revert typo. 2023-06-29 16:29:18 +02:00
Florin Chirica 4fadf150cc
Add test. 2023-06-29 16:28:17 +02:00
Florin Chirica 0c5d5e9a77
Add parser test. 2023-06-28 14:23:38 +02:00
Florin Chirica bd8989fd29
Work on the failing test. 2023-06-27 15:24:59 +02:00
Florin Chirica 5d750d00c1
Typo. 2023-06-27 03:16:35 +02:00
Florin Chirica c0e31ca2b4
Typo. 2023-06-27 01:59:46 +02:00
Florin Chirica 911eb4abd0
Some linting. 2023-06-26 19:50:06 +02:00
Florin Chirica 4101396815
Checkpoint. 2023-06-26 18:05:04 +02:00
Florin Chirica 8239391a7b
Prep for DB update. 2023-06-23 16:16:14 +02:00
Florin Chirica 1e40152e48
Some more comments. 2023-06-22 18:24:44 +02:00
Florin Chirica 642d45954f
Address some comments. 2023-06-22 14:48:01 +02:00
Florin Chirica 715ff47b24
Update tests/blockchain/test_blockchain.py
Co-authored-by: Arvid Norberg <arvid@libtorrent.org>
2023-06-22 01:13:36 +03:00
Florin Chirica e007d2b0e0
Merge branch 'main' into fc.chip13 2023-06-22 01:03:30 +03:00
Florin Chirica b90511acc8
Update tests/conftest.py
Co-authored-by: Arvid Norberg <arvid@libtorrent.org>
2023-06-21 17:29:54 +03:00
Florin Chirica eab39e05a8
Update test_blockchain.py 2023-06-15 00:26:01 +03:00
Florin Chirica ea28bca2b1
Update test_blockchain.py 2023-06-14 19:34:35 +03:00
Florin Chirica f9b35ab877
Update test_full_node_store.py 2023-06-14 18:58:13 +03:00
Florin Chirica 3849ab4f2d
Test. 2023-06-09 18:00:58 +03:00
Florin Chirica 25ada7379b
Skip 10000 blocks. 2023-06-07 02:09:17 +02:00
Florin Chirica 90c6e6e08a
Try to get test to work with 4000. 2023-06-07 01:56:33 +02:00
Florin Chirica 42be830533
Revert tests larger timeouts. 2023-06-07 01:24:42 +02:00
Florin Chirica 2f71dfc83b
Try to skip big test. 2023-06-06 23:58:41 +02:00
Florin Chirica 9d5f795453
Attempt higher timeouts. 2023-06-06 14:07:32 +02:00
Florin Chirica 2d087c26b5
Syntax. 2023-06-05 14:08:05 +02:00
Florin Chirica ea06d606f8
Enable all. 2023-06-05 14:04:30 +02:00
Florin Chirica 85ae15d91f
Enable tests. 2023-06-05 14:01:15 +02:00
wallentx 5c3871796f
Bump test-cache version to soft_fork3.0 DBs 2023-06-05 13:42:57 +02:00
almog 87fd725418
fix counter 2023-05-30 15:31:00 +03:00
almog 6f2ca7ed08
merge fix 2023-05-28 14:38:16 +03:00
almog 519ad154b5
Merge branch 'main' into fc.chip13
# Conflicts:
#	tests/conftest.py
2023-05-28 14:17:24 +03:00
Florin Chirica 646f85a122
Isort. 2023-05-24 22:46:58 +03:00
Florin Chirica 55b8c2587f
Improve test. 2023-05-24 20:36:30 +03:00
Florin Chirica b5a28f8e61
Add first test. 2023-05-24 19:26:14 +03:00
Florin Chirica c1e037015f
Lint. 2023-05-24 18:57:07 +03:00
Florin Chirica 4d4e971624
Fixtures. 2023-05-24 18:06:57 +03:00
Florin Chirica 277b05592a
Fixtures. 2023-05-24 17:28:45 +03:00
Florin Chirica f4eee5f91e
Test. 2023-05-24 14:37:41 +03:00
Florin Chirica fe940d93d0
Add tests with new interface 2023-05-24 14:19:20 +03:00
Florin Chirica 7ee318d2b6
Revert to main 2023-05-24 13:37:48 +03:00
Florin Chirica 5e635fc76c
Revert to main 2023-05-24 13:37:02 +03:00
Florin Chirica b69206719f
Revert to main 2023-05-24 13:35:44 +03:00
Florin Chirica 3f00ffe44a
Merge branch 'main' into fc.chip13 2023-05-24 13:33:08 +03:00
Florin Chirica 2646f8a3b4
Store test. 2023-05-23 15:59:49 +03:00
Florin Chirica 6f0f703ec8
Test blockchain. 2023-05-23 14:51:44 +03:00
Florin Chirica e3cded1951
Try to increase timeout. 2023-05-22 14:03:34 +03:00
Florin Chirica 74ac793b53
Try to lower the constraints. 2023-05-22 03:06:02 +03:00
Florin Chirica 7f0b5f8325
Skip cache tests. 2023-05-22 01:40:15 +03:00
Florin Chirica db56e082e6
Lint. 2023-05-22 00:54:34 +03:00
Florin Chirica f19aa94a88
pytest. 2023-05-22 00:44:17 +03:00
Florin Chirica bbdecdbdb0
WIP plot filters. 2023-05-22 00:38:57 +03:00
Florin Chirica 5d46b8c366
Revert "Try to fix tests."
This reverts commit f10c7fc847.
2023-05-21 15:56:17 +03:00
Florin Chirica f10c7fc847
Try to fix tests. 2023-05-20 01:48:23 +03:00
Florin Chirica ad4d382245
Run tests without block cache. 2023-05-20 00:16:50 +03:00
Florin Chirica ffddce65f1
Lint. 2023-05-19 22:29:54 +03:00
Florin Chirica 26e83b6f7b
Add persistent blocks. 2023-05-19 22:24:50 +03:00
Florin Chirica 407a016fe4
Try fix. 2023-05-19 21:27:23 +03:00
Florin Chirica db3127a629
Skip test. 2023-05-19 19:03:53 +03:00
Florin Chirica 4d51b11aa8
Skip test. 2023-05-19 18:55:34 +03:00
Florin Chirica 826509da7e
Add softfork3 to empty_blockchain too. 2023-05-19 18:11:27 +03:00
Florin Chirica 581ddda18f
Syntax. 2023-05-19 17:17:05 +03:00
Florin Chirica afd389e6e9
Parametrize bt. 2023-05-19 17:01:19 +03:00
Florin Chirica e6bb1ba686
Merge branch 'main' into fc.chip13 2023-05-19 14:59:39 +03:00
Florin Chirica a02f6e3b42
Trigger the soft fork. 2023-05-19 02:27:25 +03:00
Florin Chirica 73410e07b4
Fix BlockTools. 2023-05-19 01:59:27 +03:00
Florin Chirica a6f019a8ad
Lint. 2023-05-19 01:30:03 +03:00
Florin Chirica afebf46cc6
Black. 2023-05-19 01:25:01 +03:00
Florin Chirica c0f677c63b
Lint. 2023-05-19 01:18:43 +03:00
Florin Chirica 32ad3e74c0
Chip13 2023-05-18 22:52:04 +03:00
18 changed files with 517 additions and 60 deletions

View File

@ -25,7 +25,7 @@ jobs:
build:
name: Benchmarks
runs-on: benchmark
timeout-minutes: 30
timeout-minutes: 60
strategy:
fail-fast: false
max-parallel: 4
@ -33,7 +33,7 @@ jobs:
python-version: [ 3.9 ]
env:
CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet
BLOCKS_AND_PLOTS_VERSION: 0.29.0
BLOCKS_AND_PLOTS_VERSION: 0.30.0
steps:
- name: Clean workspace

View File

@ -122,7 +122,7 @@ jobs:
CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet
CHIA_SIMULATOR_ROOT: ${{ github.workspace }}/.chia/simulator
JOB_FILE_NAME: tests_${{ matrix.os.file_name }}_python-${{ matrix.python.file_name }}_${{ matrix.configuration.name }}
BLOCKS_AND_PLOTS_VERSION: 0.29.0
BLOCKS_AND_PLOTS_VERSION: 0.30.0
steps:
- name: Configure git
@ -197,7 +197,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh release download -R Chia-Network/test-cache 0.29.0 --archive=tar.gz -O - | tar xzf -
gh release download -R Chia-Network/test-cache ${{ env.BLOCKS_AND_PLOTS_VERSION }} --archive=tar.gz -O - | tar xzf -
mkdir ${{ github.workspace }}/.chia
mv ${{ github.workspace }}/test-cache-${{ env.BLOCKS_AND_PLOTS_VERSION }}/* ${{ github.workspace }}/.chia

View File

@ -203,6 +203,8 @@ async def run_add_block_benchmark(version: int) -> None:
deficit,
deficit == 16,
prev_transaction_height,
bytes32([0] * 32),
bytes32([0] * 32),
timestamp if is_transaction else None,
prev_transaction_block if prev_transaction_block != bytes32([0] * 32) else None,
None if fees == 0 else fees,

View File

@ -3,11 +3,12 @@ from __future__ import annotations
from pathlib import Path
from typing import Any, Dict, Optional
from chia.consensus.block_record import BlockRecord
from chia.consensus.block_record import BlockRecord, BlockRecordDB
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.full_block import FullBlock
from chia.util.config import load_config
from chia.util.full_block_utils import PlotFilterInfo, plot_filter_info_from_block
from chia.util.path import path_from_root
@ -102,8 +103,15 @@ def validate_v2(in_path: Path, *, validate_blocks: bool) -> None:
continue
if validate_blocks:
block = FullBlock.from_bytes(zstd.decompress(row[4]))
block_record = BlockRecord.from_bytes(row[5])
block_bytes = zstd.decompress(row[4])
block = FullBlock.from_bytes(block_bytes)
block_record_db: BlockRecordDB = BlockRecordDB.from_bytes(row[5])
plot_filter_info: PlotFilterInfo = plot_filter_info_from_block(block_bytes)
block_record = BlockRecord.from_block_record_db(
block_record_db,
plot_filter_info.pos_ss_cc_challenge_hash,
plot_filter_info.cc_sp_hash,
)
actual_header_hash = block.header_hash
actual_prev_hash = block.prev_header_hash
if actual_header_hash != hh:
@ -129,6 +137,21 @@ def validate_v2(in_path: Path, *, validate_blocks: bool) -> None:
raise RuntimeError(
f"Block {hh.hex()} has a mismatching height: {block.height} expected {height}"
)
if block_record.pos_ss_cc_challenge_hash != block.reward_chain_block.pos_ss_cc_challenge_hash:
raise RuntimeError(
f"Did not parse field pos_ss_cc_challenge_hash of block {hh.hex()} correctly: "
f"{block_record.pos_ss_cc_challenge_hash} "
f"expected {block.reward_chain_block.pos_ss_cc_challenge_hash}"
)
if block.reward_chain_block.challenge_chain_sp_vdf is None:
expected_cc_sp_hash: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
else:
expected_cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
if block_record.cc_sp_hash != expected_cc_sp_hash:
raise RuntimeError(
f"Did not parse field cc_sp_hash of block {hh.hex()} correctly: "
f"{block_record.pos_ss_cc_challenge_hash} expected {expected_cc_sp_hash}"
)
if height != current_height:
# we're moving to the next level. Make sure we found the block

View File

@ -23,7 +23,7 @@ from chia.consensus.pot_iterations import (
)
from chia.consensus.vdf_info_computation import get_signage_point_vdf_info
from chia.types.blockchain_format.classgroup import ClassgroupElement
from chia.types.blockchain_format.proof_of_space import verify_and_get_quality_string
from chia.types.blockchain_format.proof_of_space import get_plot_id, passes_plot_filter, verify_and_get_quality_string
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.slots import ChallengeChainSubSlot, RewardChainSubSlot, SubSlotProofs
from chia.types.blockchain_format.vdf import VDFInfo, VDFProof
@ -491,6 +491,27 @@ def validate_unfinished_header_block(
if q_str is None:
return None, ValidationError(Err.INVALID_POSPACE)
# 5c. Check that the plot id has not passed the plot filter within the last `UNIQUE_PLOTS_WINDOW` signage points.
if height >= constants.SOFT_FORK3_HEIGHT:
curr_optional_block_record: Optional[BlockRecord] = prev_b
plot_id = get_plot_id(header_block.reward_chain_block.proof_of_space)
curr_sp = cc_sp_hash
sp_count = 1
while curr_optional_block_record is not None and sp_count < constants.UNIQUE_PLOTS_WINDOW:
if passes_plot_filter(
constants,
plot_id,
curr_optional_block_record.pos_ss_cc_challenge_hash,
curr_optional_block_record.cc_sp_hash,
):
return None, ValidationError(Err.INVALID_POSPACE)
if curr_optional_block_record.cc_sp_hash != curr_sp:
sp_count += 1
curr_sp = curr_optional_block_record.cc_sp_hash
curr_optional_block_record = blocks.try_block_record(curr_optional_block_record.prev_hash)
# 6. check signage point index
# no need to check negative values as this is uint 8
if header_block.reward_chain_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:

View File

@ -41,6 +41,41 @@ class BlockRecordProtocol(Protocol):
return self.timestamp is not None
@streamable
@dataclass(frozen=True)
class BlockRecordDB(Streamable):
"""
This class contains the fields from `BlockRecord` that get stored in the DB.
Unlike `BlockRecord`, this should never be extended with more fields, in order to avoid DB corruption.
"""
header_hash: bytes32
prev_hash: bytes32
height: uint32
weight: uint128
total_iters: uint128
signage_point_index: uint8
challenge_vdf_output: ClassgroupElement
infused_challenge_vdf_output: Optional[ClassgroupElement]
reward_infusion_new_challenge: bytes32
challenge_block_info_hash: bytes32
sub_slot_iters: uint64
pool_puzzle_hash: bytes32
farmer_puzzle_hash: bytes32
required_iters: uint64
deficit: uint8
overflow: bool
prev_transaction_block_height: uint32
timestamp: Optional[uint64]
prev_transaction_block_hash: Optional[bytes32]
fees: Optional[uint64]
reward_claims_incorporated: Optional[List[Coin]]
finished_challenge_slot_hashes: Optional[List[bytes32]]
finished_infused_challenge_slot_hashes: Optional[List[bytes32]]
finished_reward_slot_hashes: Optional[List[bytes32]]
sub_epoch_summary_included: Optional[SubEpochSummary]
@streamable
@dataclass(frozen=True)
class BlockRecord(Streamable):
@ -69,6 +104,8 @@ class BlockRecord(Streamable):
deficit: uint8 # A deficit of 16 is an overflow block after an infusion. Deficit of 15 is a challenge block
overflow: bool
prev_transaction_block_height: uint32
pos_ss_cc_challenge_hash: bytes32
cc_sp_hash: bytes32
# Transaction block (present iff is_transaction_block)
timestamp: Optional[uint64]
@ -117,3 +154,66 @@ class BlockRecord(Streamable):
def sp_total_iters(self, constants: ConsensusConstants) -> uint128:
return uint128(self.sp_sub_slot_total_iters(constants) + self.sp_iters(constants))
def to_block_record_db(self) -> BlockRecordDB:
return BlockRecordDB(
header_hash=self.header_hash,
prev_hash=self.prev_hash,
height=self.height,
weight=self.weight,
total_iters=self.total_iters,
signage_point_index=self.signage_point_index,
challenge_vdf_output=self.challenge_vdf_output,
infused_challenge_vdf_output=self.infused_challenge_vdf_output,
reward_infusion_new_challenge=self.reward_infusion_new_challenge,
challenge_block_info_hash=self.challenge_block_info_hash,
sub_slot_iters=self.sub_slot_iters,
pool_puzzle_hash=self.pool_puzzle_hash,
farmer_puzzle_hash=self.farmer_puzzle_hash,
required_iters=self.required_iters,
deficit=self.deficit,
overflow=self.overflow,
prev_transaction_block_height=self.prev_transaction_block_height,
timestamp=self.timestamp,
prev_transaction_block_hash=self.prev_transaction_block_hash,
fees=self.fees,
reward_claims_incorporated=self.reward_claims_incorporated,
finished_challenge_slot_hashes=self.finished_challenge_slot_hashes,
finished_infused_challenge_slot_hashes=self.finished_infused_challenge_slot_hashes,
finished_reward_slot_hashes=self.finished_reward_slot_hashes,
sub_epoch_summary_included=self.sub_epoch_summary_included,
)
@classmethod
def from_block_record_db(
cls, block_record_db: BlockRecordDB, pos_ss_cc_challenge_hash: bytes32, cc_sp_hash: bytes32
) -> BlockRecord:
return cls(
header_hash=block_record_db.header_hash,
prev_hash=block_record_db.prev_hash,
height=block_record_db.height,
weight=block_record_db.weight,
total_iters=block_record_db.total_iters,
signage_point_index=block_record_db.signage_point_index,
challenge_vdf_output=block_record_db.challenge_vdf_output,
infused_challenge_vdf_output=block_record_db.infused_challenge_vdf_output,
reward_infusion_new_challenge=block_record_db.reward_infusion_new_challenge,
challenge_block_info_hash=block_record_db.challenge_block_info_hash,
sub_slot_iters=block_record_db.sub_slot_iters,
pool_puzzle_hash=block_record_db.pool_puzzle_hash,
farmer_puzzle_hash=block_record_db.farmer_puzzle_hash,
required_iters=block_record_db.required_iters,
deficit=block_record_db.deficit,
overflow=block_record_db.overflow,
prev_transaction_block_height=block_record_db.prev_transaction_block_height,
pos_ss_cc_challenge_hash=pos_ss_cc_challenge_hash,
cc_sp_hash=cc_sp_hash,
timestamp=block_record_db.timestamp,
prev_transaction_block_hash=block_record_db.prev_transaction_block_hash,
fees=block_record_db.fees,
reward_claims_incorporated=block_record_db.reward_claims_incorporated,
finished_challenge_slot_hashes=block_record_db.finished_challenge_slot_hashes,
finished_infused_challenge_slot_hashes=block_record_db.finished_infused_challenge_slot_hashes,
finished_reward_slot_hashes=block_record_db.finished_reward_slot_hashes,
sub_epoch_summary_included=block_record_db.sub_epoch_summary_included,
)
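
A minimal round-trip sketch (illustrative only, not part of the diff): the two derived fields are dropped when converting to the DB form and must be supplied again when rebuilding the full BlockRecord.

    # Hypothetical usage, assuming `block_record` is an existing BlockRecord:
    record_db = block_record.to_block_record_db()
    restored = BlockRecord.from_block_record_db(
        record_db,
        pos_ss_cc_challenge_hash=block_record.pos_ss_cc_challenge_hash,
        cc_sp_hash=block_record.cc_sp_hash,
    )
    assert restored == block_record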

View File

@ -78,6 +78,9 @@ class ConsensusConstants:
PLOT_FILTER_64_HEIGHT: uint32
PLOT_FILTER_32_HEIGHT: uint32
# Number of consecutive signage points within which plot ids are required to be distinct
UNIQUE_PLOTS_WINDOW: uint8
def replace(self, **changes: object) -> "ConsensusConstants":
return dataclasses.replace(self, **changes)

View File

@ -67,6 +67,8 @@ default_kwargs = {
"PLOT_FILTER_64_HEIGHT": 15592000,
# June 2033
"PLOT_FILTER_32_HEIGHT": 20643000,
# Disallow a plot from passing the plot filter more than once within any four consecutive signage points.
"UNIQUE_PLOTS_WINDOW": 4,
}
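
A rough sketch of the rule this constant enforces (illustrative only; the real checks live in validate_unfinished_header_block and BlockTools elsewhere in this diff): walking back from the previous block, a proof of space is rejected if the same plot id already passed the plot filter within the last UNIQUE_PLOTS_WINDOW distinct signage points.

    # Hypothetical helper, assuming `prev_records` is newest-first and each record
    # exposes pos_ss_cc_challenge_hash / cc_sp_hash as in BlockRecord above:
    def plot_repeats_in_window(constants, plot_id, cc_sp_hash, prev_records) -> bool:
        curr_sp = cc_sp_hash
        sp_count = 1
        for rec in prev_records:
            if sp_count >= constants.UNIQUE_PLOTS_WINDOW:
                # Examined a full window of distinct signage points without a repeat.
                return False
            if passes_plot_filter(constants, plot_id, rec.pos_ss_cc_challenge_hash, rec.cc_sp_hash):
                # Same plot already passed the filter inside the window.
                return True
            if rec.cc_sp_hash != curr_sp:
                sp_count += 1
                curr_sp = rec.cc_sp_hash
        return False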

View File

@ -142,6 +142,12 @@ def header_block_to_sub_block_record(
timestamp = block.foliage_transaction_block.timestamp if block.foliage_transaction_block is not None else None
fees = block.transactions_info.fees if block.transactions_info is not None else None
if block.reward_chain_block.challenge_chain_sp_vdf is None:
# Edge case of first sp (start of slot), where sp_iters == 0
cc_sp_hash: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
else:
cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
return BlockRecord(
block.header_hash,
block.prev_header_hash,
@ -160,6 +166,8 @@ def header_block_to_sub_block_record(
deficit,
overflow,
prev_transaction_block_height,
block.reward_chain_block.pos_ss_cc_challenge_hash,
cc_sp_hash,
timestamp,
prev_transaction_block_hash,
fees,

View File

@ -8,14 +8,20 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
import typing_extensions
import zstd
from chia.consensus.block_record import BlockRecord
from chia.consensus.block_record import BlockRecord, BlockRecordDB
from chia.types.blockchain_format.serialized_program import SerializedProgram
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.full_block import FullBlock
from chia.types.weight_proof import SubEpochChallengeSegment, SubEpochSegments
from chia.util.db_wrapper import DBWrapper2, execute_fetchone
from chia.util.errors import Err
from chia.util.full_block_utils import GeneratorBlockInfo, block_info_from_block, generator_from_block
from chia.util.full_block_utils import (
GeneratorBlockInfo,
PlotFilterInfo,
block_info_from_block,
generator_from_block,
plot_filter_info_from_block,
)
from chia.util.ints import uint32
from chia.util.lru_cache import LRUCache
@ -187,6 +193,7 @@ class BlockStore:
async def add_full_block(self, header_hash: bytes32, block: FullBlock, block_record: BlockRecord) -> None:
self.block_cache.put(header_hash, block)
block_record_db: BlockRecordDB = block_record.to_block_record_db()
if self.db_wrapper.db_version == 2:
ses: Optional[bytes] = (
@ -206,7 +213,7 @@ class BlockStore:
int(block.is_fully_compactified()),
False, # in_main_chain
self.compress(block),
bytes(block_record),
bytes(block_record_db),
),
)
@ -229,7 +236,7 @@ class BlockStore:
header_hash.hex(),
block.prev_header_hash.hex(),
block.height,
bytes(block_record),
bytes(block_record_db),
None
if block_record.sub_epoch_summary_included is None
else bytes(block_record.sub_epoch_summary_included),
@ -350,6 +357,36 @@ class BlockStore:
b.foliage.prev_block_hash, b.transactions_generator, b.transactions_generator_ref_list
)
async def get_plot_filter_info(self, header_hash: bytes32) -> Optional[PlotFilterInfo]:
cached = self.block_cache.get(header_hash)
if cached is not None:
if cached.reward_chain_block.challenge_chain_sp_vdf is None:
cc_sp_hash: bytes32 = cached.reward_chain_block.pos_ss_cc_challenge_hash
else:
cc_sp_hash = cached.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
return PlotFilterInfo(cached.reward_chain_block.pos_ss_cc_challenge_hash, cc_sp_hash)
formatted_str = "SELECT block, height from full_blocks WHERE header_hash=?"
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(conn, formatted_str, (self.maybe_to_hex(header_hash),))
if row is None:
return None
if self.db_wrapper.db_version == 2:
block_bytes = zstd.decompress(row[0])
else:
block_bytes = row[0]
try:
return plot_filter_info_from_block(block_bytes)
except Exception as e:
log.exception(f"cheap parser failed for block at height {row[1]}: {e}")
b = FullBlock.from_bytes(block_bytes)
if b.reward_chain_block.challenge_chain_sp_vdf is None:
cc_sp_hash = b.reward_chain_block.pos_ss_cc_challenge_hash
else:
cc_sp_hash = b.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
return PlotFilterInfo(b.reward_chain_block.pos_ss_cc_challenge_hash, cc_sp_hash)
async def get_generator(self, header_hash: bytes32) -> Optional[SerializedProgram]:
cached = self.block_cache.get(header_hash)
if cached is not None:
@ -414,7 +451,7 @@ class BlockStore:
if len(header_hashes) == 0:
return []
all_blocks: Dict[bytes32, BlockRecord] = {}
all_blocks: Dict[bytes32, BlockRecordDB] = {}
if self.db_wrapper.db_version == 2:
async with self.db_wrapper.reader_no_transaction() as conn:
async with conn.execute(
@ -424,20 +461,27 @@ class BlockStore:
) as cursor:
for row in await cursor.fetchall():
header_hash = bytes32(row[0])
all_blocks[header_hash] = BlockRecord.from_bytes(row[1])
all_blocks[header_hash] = BlockRecordDB.from_bytes(row[1])
else:
formatted_str = f'SELECT block from block_records WHERE header_hash in ({"?," * (len(header_hashes) - 1)}?)'
async with self.db_wrapper.reader_no_transaction() as conn:
async with conn.execute(formatted_str, [hh.hex() for hh in header_hashes]) as cursor:
for row in await cursor.fetchall():
block_rec: BlockRecord = BlockRecord.from_bytes(row[0])
all_blocks[block_rec.header_hash] = block_rec
block_rec_db: BlockRecordDB = BlockRecordDB.from_bytes(row[0])
all_blocks[block_rec_db.header_hash] = block_rec_db
ret: List[BlockRecord] = []
for hh in header_hashes:
if hh not in all_blocks:
raise ValueError(f"Header hash {hh} not in the blockchain")
ret.append(all_blocks[hh])
plot_filter_info = await self.get_plot_filter_info(hh)
assert plot_filter_info is not None
block_record = BlockRecord.from_block_record_db(
all_blocks[hh],
plot_filter_info.pos_ss_cc_challenge_hash,
plot_filter_info.cc_sp_hash,
)
ret.append(block_record)
return ret
async def get_block_bytes_by_hash(self, header_hashes: List[bytes32]) -> List[bytes]:
@ -516,7 +560,9 @@ class BlockStore:
) as cursor:
row = await cursor.fetchone()
if row is not None:
return BlockRecord.from_bytes(row[0])
block_record_db = BlockRecordDB.from_bytes(row[0])
else:
return None
else:
async with self.db_wrapper.reader_no_transaction() as conn:
@ -526,8 +572,18 @@ class BlockStore:
) as cursor:
row = await cursor.fetchone()
if row is not None:
return BlockRecord.from_bytes(row[0])
return None
block_record_db = BlockRecordDB.from_bytes(row[0])
else:
return None
plot_filter_info = await self.get_plot_filter_info(header_hash)
assert plot_filter_info is not None
block_record = BlockRecord.from_block_record_db(
block_record_db,
plot_filter_info.pos_ss_cc_challenge_hash,
plot_filter_info.cc_sp_hash,
)
return block_record
async def get_block_records_in_range(
self,
@ -539,7 +595,7 @@ class BlockStore:
if present.
"""
ret: Dict[bytes32, BlockRecord] = {}
ret: Dict[bytes32, BlockRecordDB] = {}
if self.db_wrapper.db_version == 2:
async with self.db_wrapper.reader_no_transaction() as conn:
async with conn.execute(
@ -548,7 +604,7 @@ class BlockStore:
) as cursor:
for row in await cursor.fetchall():
header_hash = bytes32(row[0])
ret[header_hash] = BlockRecord.from_bytes(row[1])
ret[header_hash] = BlockRecordDB.from_bytes(row[1])
else:
formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {start} and height <= {stop}"
@ -557,9 +613,19 @@ class BlockStore:
async with await conn.execute(formatted_str) as cursor:
for row in await cursor.fetchall():
header_hash = self.maybe_from_hex(row[0])
ret[header_hash] = BlockRecord.from_bytes(row[1])
ret[header_hash] = BlockRecordDB.from_bytes(row[1])
return ret
result: Dict[bytes32, BlockRecord] = {}
for hh, block_record_db in ret.items():
plot_filter_info = await self.get_plot_filter_info(hh)
assert plot_filter_info is not None
block_record = BlockRecord.from_block_record_db(
block_record_db,
plot_filter_info.pos_ss_cc_challenge_hash,
plot_filter_info.cc_sp_hash,
)
result[hh] = block_record
return result
async def get_block_bytes_in_range(
self,
@ -616,7 +682,7 @@ class BlockStore:
if peak is None:
return {}, None
ret: Dict[bytes32, BlockRecord] = {}
ret: Dict[bytes32, BlockRecordDB] = {}
if self.db_wrapper.db_version == 2:
async with self.db_wrapper.reader_no_transaction() as conn:
async with conn.execute(
@ -625,7 +691,7 @@ class BlockStore:
) as cursor:
for row in await cursor.fetchall():
header_hash = bytes32(row[0])
ret[header_hash] = BlockRecord.from_bytes(row[1])
ret[header_hash] = BlockRecordDB.from_bytes(row[1])
else:
formatted_str = f"SELECT header_hash, block from block_records WHERE height >= {peak[1] - blocks_n}"
@ -633,9 +699,19 @@ class BlockStore:
async with conn.execute(formatted_str) as cursor:
for row in await cursor.fetchall():
header_hash = self.maybe_from_hex(row[0])
ret[header_hash] = BlockRecord.from_bytes(row[1])
ret[header_hash] = BlockRecordDB.from_bytes(row[1])
return ret, peak[0]
result: Dict[bytes32, BlockRecord] = {}
for hh, block_record_db in ret.items():
plot_filter_info = await self.get_plot_filter_info(hh)
assert plot_filter_info is not None
block_record = BlockRecord.from_block_record_db(
block_record_db,
plot_filter_info.pos_ss_cc_challenge_hash,
plot_filter_info.cc_sp_hash,
)
result[hh] = block_record
return result, peak[0]
async def set_peak(self, header_hash: bytes32) -> None:
# We need to be in a sqlite transaction here.

View File

@ -78,6 +78,7 @@ from chia.types.blockchain_format.proof_of_space import (
calculate_pos_challenge,
generate_plot_public_key,
generate_taproot_sk,
get_plot_id,
passes_plot_filter,
verify_and_get_quality_string,
)
@ -144,6 +145,7 @@ test_constants = DEFAULT_CONSTANTS.replace(
* 24
* 10, # Allows creating blockchains with timestamps up to 10 days in the future, for testing
"MEMPOOL_BLOCK_BUFFER": 6,
"UNIQUE_PLOTS_WINDOW": 2,
}
)
@ -538,6 +540,28 @@ class BlockTools:
def get_pool_wallet_tool(self) -> WalletTool:
return WalletTool(self.constants, self.pool_master_sk)
# Checks whether the given plot passed the plot filter at any of the previous `UNIQUE_PLOTS_WINDOW` signage points.
def plot_id_passed_previous_filters(self, plot_id: bytes32, cc_sp_hash: bytes32, blocks: List[FullBlock]) -> bool:
curr_sp_hash = cc_sp_hash
sp_count = 1
for block in reversed(blocks):
if sp_count >= self.constants.UNIQUE_PLOTS_WINDOW:
return False
challenge = block.reward_chain_block.pos_ss_cc_challenge_hash
if block.reward_chain_block.challenge_chain_sp_vdf is None:
# Edge case of first sp (start of slot), where sp_iters == 0
cc_sp_hash = challenge
else:
cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
if passes_plot_filter(self.constants, plot_id, challenge, cc_sp_hash):
return True
if curr_sp_hash != cc_sp_hash:
sp_count += 1
curr_sp_hash = cc_sp_hash
return False
def get_consecutive_blocks(
self,
num_blocks: int,
@ -694,6 +718,10 @@ class BlockTools:
if required_iters <= latest_block.required_iters:
continue
assert latest_block.header_hash in blocks
plot_id = get_plot_id(proof_of_space)
if latest_block.height + 1 >= constants.SOFT_FORK3_HEIGHT:
if self.plot_id_passed_previous_filters(plot_id, cc_sp_output_hash, block_list) is True:
continue
additions = None
removals = None
if transaction_data_included:
@ -985,7 +1013,10 @@ class BlockTools:
if blocks_added_this_sub_slot == constants.MAX_SUB_SLOT_BLOCKS:
break
assert last_timestamp is not None
plot_id = get_plot_id(proof_of_space)
if latest_block.height + 1 >= constants.SOFT_FORK3_HEIGHT:
if self.plot_id_passed_previous_filters(plot_id, cc_sp_output_hash, block_list):
continue
if proof_of_space.pool_contract_puzzle_hash is not None:
if pool_reward_puzzle_hash is not None:
# The caller wants to be paid to a specific address, but this PoSpace is tied to an

View File

@ -8,6 +8,7 @@ from blspy import G1Element, G2Element
from chia_rs import serialized_length
from chiabip158 import PyBIP158
from chia.types.blockchain_format.classgroup import ClassgroupElement
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.foliage import TransactionsInfo
from chia.types.blockchain_format.serialized_program import SerializedProgram
@ -322,3 +323,26 @@ def header_block_from_block(
header_block += bytes(transactions_info)
return header_block
@dataclass(frozen=True)
class PlotFilterInfo:
pos_ss_cc_challenge_hash: bytes32
cc_sp_hash: bytes32
def plot_filter_info_from_block(buf: memoryview) -> PlotFilterInfo:
buf = skip_list(buf, skip_end_of_sub_slot_bundle) # finished_sub_slots
buf = skip_uint128(buf) # weight
buf = skip_uint32(buf) # height
buf = skip_uint128(buf) # total_iters
buf = skip_uint8(buf) # signage_point_index
pos_ss_cc_challenge_hash = bytes32(buf[:32])
buf = skip_bytes32(buf) # pos_ss_cc_challenge_hash
buf = skip_proof_of_space(buf) # proof_of_space
# Optional[challenge_chain_sp_vdf]
if buf[0] == 0:
return PlotFilterInfo(pos_ss_cc_challenge_hash, pos_ss_cc_challenge_hash)
buf = buf[1 + 32 + 8 :] # optional, vdf info challenge, vdf info number_of_iterations
output = ClassgroupElement.from_bytes(buf[:100])
return PlotFilterInfo(pos_ss_cc_challenge_hash, output.get_hash())
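
A short usage sketch (illustrative, not part of the diff): the cheap parser skips over the serialized block prefix and reads only the two fields needed for the plot filter, avoiding a full FullBlock deserialization.

    # Hypothetical usage, assuming `full_block` is a FullBlock instance:
    block_bytes = bytes(full_block)
    info = plot_filter_info_from_block(memoryview(block_bytes))
    print(info.pos_ss_cc_challenge_hash.hex(), info.cc_sp_hash.hex())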

View File

@ -120,6 +120,16 @@ async def _validate_and_add_block_multi_error(
raise AssertionError("Did not return an error")
async def _validate_and_add_block_multi_error_or_pass(
blockchain: Blockchain, block: FullBlock, expected_errors: List[Err], skip_prevalidation: bool = False
) -> None:
# Checks that the blockchain either adds the block successfully or returns one of the expected errors.
try:
await _validate_and_add_block(blockchain, block, skip_prevalidation=skip_prevalidation)
except AssertionError as e:
assert e.args[0] in expected_errors
async def _validate_and_add_block_multi_result(
blockchain: Blockchain,
block: FullBlock,

View File

@ -52,6 +52,7 @@ from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
from tests.blockchain.blockchain_test_utils import (
_validate_and_add_block,
_validate_and_add_block_multi_error,
_validate_and_add_block_multi_error_or_pass,
_validate_and_add_block_multi_result,
_validate_and_add_block_no_error,
)
@ -208,9 +209,7 @@ class TestBlockHeaderValidation:
new_finished_ss_3.challenge_chain.get_hash(),
)
log.warning(f"Number of slots: {len(block.finished_sub_slots)}")
block_bad_3 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_3] + block.finished_sub_slots[1:]
)
block_bad_3 = recursive_replace(block, "finished_sub_slots", [new_finished_ss_3])
header_block_bad_3 = get_block_header(block_bad_3, [], [])
_, error = validate_finished_header_block(
@ -239,9 +238,7 @@ class TestBlockHeaderValidation:
"reward_chain.challenge_chain_sub_slot_hash",
new_finished_ss_4.challenge_chain.get_hash(),
)
block_bad_4 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_4] + block.finished_sub_slots[1:]
)
block_bad_4 = recursive_replace(block, "finished_sub_slots", [new_finished_ss_4])
header_block_bad_4 = get_block_header(block_bad_4, [], [])
_, error = validate_finished_header_block(
@ -768,12 +765,13 @@ class TestBlockHeaderValidation:
blocks_base = default_400_blocks[: bt.constants.EPOCH_BLOCKS]
assert len(blocks_base) == bt.constants.EPOCH_BLOCKS
blocks_1 = bt.get_consecutive_blocks(1, block_list_input=blocks_base, force_overflow=True)
blocks_2 = bt.get_consecutive_blocks(1, skip_slots=3, block_list_input=blocks_base, force_overflow=True)
blocks_2 = bt.get_consecutive_blocks(1, skip_slots=5, block_list_input=blocks_base, force_overflow=True)
for block in blocks_base:
await _validate_and_add_block(empty_blockchain, block, skip_prevalidation=True)
await _validate_and_add_block(
empty_blockchain, blocks_1[-1], expected_result=AddBlockResult.NEW_PEAK, skip_prevalidation=True
)
assert blocks_1[-1].header_hash != blocks_2[-1].header_hash
await _validate_and_add_block(
empty_blockchain, blocks_2[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, skip_prevalidation=True
)
@ -1364,6 +1362,8 @@ class TestBlockHeaderValidation:
@pytest.mark.asyncio
async def test_pool_target_contract(self, empty_blockchain, bt):
if bt.constants.SOFT_FORK3_HEIGHT == 0:
pytest.skip("Skipped temporarily until adding more pool plots.")
# 20c invalid pool target with contract
blocks_initial = bt.get_consecutive_blocks(2)
await _validate_and_add_block(empty_blockchain, blocks_initial[0])
@ -3611,3 +3611,46 @@ async def test_reorg_flip_flop(empty_blockchain, bt):
for block in chain_b[40:]:
await _validate_and_add_block(b, block)
@pytest.mark.parametrize("unique_plots_window", [1, 2])
@pytest.mark.parametrize("bt_respects_soft_fork3", [True, False])
@pytest.mark.parametrize("soft_fork3_height", [0, 10, 10000])
@pytest.mark.asyncio
async def test_soft_fork3_activation(
blockchain_constants, bt_respects_soft_fork3, soft_fork3_height, db_version, unique_plots_window
):
with TempKeyring() as keychain:
bt = await create_block_tools_async(
constants=blockchain_constants.replace(
SOFT_FORK3_HEIGHT=(0 if bt_respects_soft_fork3 else 10000),
UNIQUE_PLOTS_WINDOW=unique_plots_window,
),
keychain=keychain,
)
blockchain_constants = bt.constants.replace(SOFT_FORK3_HEIGHT=soft_fork3_height)
b, db_wrapper, db_path = await create_blockchain(blockchain_constants, db_version)
blocks = bt.get_consecutive_blocks(25)
for height, block in enumerate(blocks):
await _validate_and_add_block_multi_error_or_pass(b, block, [Err.INVALID_POSPACE])
peak = b.get_peak()
assert peak is not None
if peak.height != height:
break
peak = b.get_peak()
assert peak is not None
if bt_respects_soft_fork3 or unique_plots_window == 1:
assert peak.height == 24
else:
if soft_fork3_height == 0:
assert peak.height < 24
elif soft_fork3_height == 10:
assert peak.height < 24 and peak.height >= 9
else:
assert peak.height == 24
await db_wrapper.close()
b.shut_down()
db_path.unlink()

View File

@ -94,9 +94,10 @@ def get_keychain():
class Mode(Enum):
PLAIN = 0
SOFT_FORK3 = 1
@pytest.fixture(scope="session", params=[Mode.PLAIN])
@pytest.fixture(scope="session", params=[Mode.PLAIN, Mode.SOFT_FORK3])
def consensus_mode(request):
return request.param
@ -105,6 +106,8 @@ def consensus_mode(request):
def blockchain_constants(consensus_mode) -> ConsensusConstants:
if consensus_mode == Mode.PLAIN:
return test_constants
if consensus_mode == Mode.SOFT_FORK3:
return test_constants.replace(SOFT_FORK3_HEIGHT=0)
raise AssertionError("Invalid Blockchain mode in simulation")
@ -166,25 +169,37 @@ saved_blocks_version = "rc5"
@pytest.fixture(scope="session")
def default_400_blocks(bt):
version = ""
if bt.constants.SOFT_FORK3_HEIGHT == 0:
version = "_softfork3"
from tests.util.blockchain import persistent_blocks
return persistent_blocks(400, f"test_blocks_400_{saved_blocks_version}.db", bt, seed=b"400")
return persistent_blocks(400, f"test_blocks_400_{saved_blocks_version}{version}.db", bt, seed=b"400")
@pytest.fixture(scope="session")
def default_1000_blocks(bt):
version = ""
if bt.constants.SOFT_FORK3_HEIGHT == 0:
version = "_softfork3"
from tests.util.blockchain import persistent_blocks
return persistent_blocks(1000, f"test_blocks_1000_{saved_blocks_version}.db", bt, seed=b"1000")
return persistent_blocks(1000, f"test_blocks_1000_{saved_blocks_version}{version}.db", bt, seed=b"1000")
@pytest.fixture(scope="session")
def pre_genesis_empty_slots_1000_blocks(bt):
version = ""
if bt.constants.SOFT_FORK3_HEIGHT == 0:
version = "_softfork3"
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
1000,
f"pre_genesis_empty_slots_1000_blocks{saved_blocks_version}.db",
f"pre_genesis_empty_slots_1000_blocks{saved_blocks_version}{version}.db",
bt,
seed=b"empty_slots",
empty_sub_slots=1,
@ -193,20 +208,30 @@ def pre_genesis_empty_slots_1000_blocks(bt):
@pytest.fixture(scope="session")
def default_1500_blocks(bt):
version = ""
if bt.constants.SOFT_FORK3_HEIGHT == 0:
version = "_softfork3"
from tests.util.blockchain import persistent_blocks
return persistent_blocks(1500, f"test_blocks_1500_{saved_blocks_version}.db", bt, seed=b"1500")
return persistent_blocks(1500, f"test_blocks_1500_{saved_blocks_version}{version}.db", bt, seed=b"1500")
@pytest.fixture(scope="session")
def default_10000_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(10000, f"test_blocks_10000_{saved_blocks_version}.db", bt, seed=b"10000")
if bt.constants.SOFT_FORK3_HEIGHT == 0:
pytest.skip("Test cache not available yet")
else:
return persistent_blocks(10000, f"test_blocks_10000_{saved_blocks_version}.db", bt, seed=b"10000")
@pytest.fixture(scope="session")
def default_20000_blocks(bt):
if bt.constants.SOFT_FORK3_HEIGHT == 0:
pytest.skip("Test cache not available")
from tests.util.blockchain import persistent_blocks
return persistent_blocks(20000, f"test_blocks_20000_{saved_blocks_version}.db", bt, seed=b"20000")
@ -214,11 +239,15 @@ def default_20000_blocks(bt):
@pytest.fixture(scope="session")
def test_long_reorg_blocks(bt, default_1500_blocks):
version = ""
if bt.constants.SOFT_FORK3_HEIGHT == 0:
version = "_softfork3"
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
758,
f"test_blocks_long_reorg_{saved_blocks_version}.db",
f"test_blocks_long_reorg_{saved_blocks_version}{version}.db",
bt,
block_list_input=default_1500_blocks[:320],
seed=b"reorg_blocks",
@ -228,11 +257,15 @@ def test_long_reorg_blocks(bt, default_1500_blocks):
@pytest.fixture(scope="session")
def default_2000_blocks_compact(bt):
version = ""
if bt.constants.SOFT_FORK3_HEIGHT == 0:
version = "_softfork3"
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
2000,
f"test_blocks_2000_compact_{saved_blocks_version}.db",
f"test_blocks_2000_compact_{saved_blocks_version}{version}.db",
bt,
normalized_to_identity_cc_eos=True,
normalized_to_identity_icc_eos=True,
@ -246,16 +279,19 @@ def default_2000_blocks_compact(bt):
def default_10000_blocks_compact(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
10000,
f"test_blocks_10000_compact_{saved_blocks_version}.db",
bt,
normalized_to_identity_cc_eos=True,
normalized_to_identity_icc_eos=True,
normalized_to_identity_cc_ip=True,
normalized_to_identity_cc_sp=True,
seed=b"1000_compact",
)
if bt.constants.SOFT_FORK3_HEIGHT == 0:
pytest.skip("Test cache not available yet")
else:
return persistent_blocks(
10000,
f"test_blocks_10000_compact_{saved_blocks_version}.db",
bt,
normalized_to_identity_cc_eos=True,
normalized_to_identity_icc_eos=True,
normalized_to_identity_cc_ip=True,
normalized_to_identity_cc_sp=True,
seed=b"1000_compact",
)
@pytest.fixture(scope="function")
@ -316,8 +352,13 @@ async def five_nodes(db_version, self_hostname, blockchain_constants):
@pytest_asyncio.fixture(scope="function")
async def wallet_nodes():
async_gen = setup_simulators_and_wallets(2, 1, {"MEMPOOL_BLOCK_BUFFER": 1, "MAX_BLOCK_COST_CLVM": 400000000})
async def wallet_nodes(blockchain_constants):
constants = blockchain_constants
async_gen = setup_simulators_and_wallets(
2,
1,
{"MEMPOOL_BLOCK_BUFFER": 1, "MAX_BLOCK_COST_CLVM": 400000000, "SOFT_FORK3_HEIGHT": constants.SOFT_FORK3_HEIGHT},
)
nodes, wallets, bt = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
@ -344,8 +385,10 @@ async def two_nodes_sim_and_wallets():
@pytest_asyncio.fixture(scope="function")
async def two_nodes_sim_and_wallets_services():
async for _ in setup_simulators_and_wallets_service(2, 0, {}):
async def two_nodes_sim_and_wallets_services(blockchain_constants):
async for _ in setup_simulators_and_wallets_service(
2, 0, {"SOFT_FORK3_HEIGHT": blockchain_constants.SOFT_FORK3_HEIGHT}
):
yield _

View File

@ -345,3 +345,58 @@ async def test_get_block_bytes_in_range(tmp_dir: Path, bt: BlockTools, db_versio
with pytest.raises(ValueError):
await store_2.get_block_bytes_in_range(0, 10)
@pytest.mark.asyncio
async def test_get_plot_filer_info(
default_1000_blocks: List[FullBlock], tmp_dir: Path, db_version: int, bt: BlockTools
) -> None:
assert sqlite3.threadsafety >= 1
async with DBConnection(db_version) as db_wrapper, DBConnection(db_version) as db_wrapper_2:
# Use a different file for the blockchain
coin_store_2 = await CoinStore.create(db_wrapper_2)
store_2 = await BlockStore.create(db_wrapper_2)
bc = await Blockchain.create(coin_store_2, store_2, bt.constants, tmp_dir, 2)
store = await BlockStore.create(db_wrapper)
await BlockStore.create(db_wrapper_2)
blocks: List[FullBlock] = []
expected_cc_sp_hashes: List[bytes32] = []
for block in default_1000_blocks:
await _validate_and_add_block(bc, block)
block_record_to_add = bc.block_record(block.header_hash)
await store.add_full_block(block.header_hash, block, block_record_to_add)
blocks.append(block)
if block.reward_chain_block.challenge_chain_sp_vdf is None:
expected_cc_sp_hashes.append(block.reward_chain_block.pos_ss_cc_challenge_hash)
else:
expected_cc_sp_hashes.append(block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash())
# Keep the query small.
if len(blocks) > 5:
blocks.pop(0)
expected_cc_sp_hashes.pop(0)
block_records = await store.get_block_records_by_hash([block.header_hash for block in blocks])
for full_b, block_record, expected_cc_sp in zip(blocks, block_records, expected_cc_sp_hashes):
assert block_record.pos_ss_cc_challenge_hash == full_b.reward_chain_block.pos_ss_cc_challenge_hash
assert block_record.cc_sp_hash == expected_cc_sp
opt_block_record = await store.get_block_record(block.header_hash)
assert opt_block_record is not None
assert opt_block_record.pos_ss_cc_challenge_hash == block.reward_chain_block.pos_ss_cc_challenge_hash
assert opt_block_record.cc_sp_hash == expected_cc_sp_hashes[-1]
block_records_dict = await store.get_block_records_in_range(max(0, block.height - 4), block.height)
for full_b, expected_cc_sp in zip(blocks, expected_cc_sp_hashes):
block_record = block_records_dict[full_b.header_hash]
assert block_record.pos_ss_cc_challenge_hash == full_b.reward_chain_block.pos_ss_cc_challenge_hash
assert block_record.cc_sp_hash == expected_cc_sp
await store.set_peak(block.header_hash)
block_records_dict, _ = await store.get_block_records_close_to_peak(4)
for full_b, expected_cc_sp in zip(blocks, expected_cc_sp_hashes):
block_record = block_records_dict[full_b.header_hash]
assert block_record.pos_ss_cc_challenge_hash == full_b.reward_chain_block.pos_ss_cc_challenge_hash
assert block_record.cc_sp_hash == expected_cc_sp

View File

@ -730,7 +730,9 @@ class TestFullNodeStore:
assert_sp_none(i2 + 1, False)
assert_sp_none(i1, True)
assert_sp_none(i1 + 1, True)
assert_sp_none(i1 + 4, True)
# We load into `all_sps` only up to `NUM_SPS_SUB_SLOT - 3`, so make sure we're not out of range
if i1 + 4 < custom_block_tools.constants.NUM_SPS_SUB_SLOT - 3:
assert_sp_none(i1 + 4, True)
for i in range(i2, custom_block_tools.constants.NUM_SPS_SUB_SLOT):
if is_overflow_block(custom_block_tools.constants, uint8(i)):
@ -799,6 +801,8 @@ class TestFullNodeStore:
store = FullNodeStore(blockchain.constants)
peak = None
peak_full_block = None
if blockchain.constants.SOFT_FORK3_HEIGHT == 0:
pytest.skip("Test temporarily skipped.")
for block in default_1000_blocks:
for sub_slot in block.finished_sub_slots:
assert store.new_finished_sub_slot(sub_slot, blockchain, peak, peak_full_block) is not None

View File

@ -23,7 +23,12 @@ from chia.types.blockchain_format.vdf import VDFInfo, VDFProof
from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
from chia.types.full_block import FullBlock
from chia.types.header_block import HeaderBlock
from chia.util.full_block_utils import block_info_from_block, generator_from_block, header_block_from_block
from chia.util.full_block_utils import (
block_info_from_block,
generator_from_block,
header_block_from_block,
plot_filter_info_from_block,
)
from chia.util.generator_tools import get_block_header
from chia.util.ints import uint8, uint32, uint64, uint128
@ -258,6 +263,13 @@ async def test_parser():
assert block.transactions_generator == bi.transactions_generator
assert block.prev_header_hash == bi.prev_header_hash
assert block.transactions_generator_ref_list == bi.transactions_generator_ref_list
pfi = plot_filter_info_from_block(block_bytes)
assert pfi.pos_ss_cc_challenge_hash == block.reward_chain_block.pos_ss_cc_challenge_hash
if block.reward_chain_block.challenge_chain_sp_vdf is None:
expected_cc_sp_hash: bytes32 = block.reward_chain_block.pos_ss_cc_challenge_hash
else:
expected_cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
assert pfi.cc_sp_hash == expected_cc_sp_hash
# this doubles the run-time of this test, with questionable utility
# assert gen == FullBlock.from_bytes(block_bytes).transactions_generator