10 changes: 4 additions & 6 deletions chia/_tests/core/mempool/test_mempool_manager.py
@@ -41,7 +41,6 @@
QUOTE_BYTES,
QUOTE_EXECUTION_COST,
MempoolManager,
- NewPeakItem,
TimelockConditions,
can_replace,
check_removals,
@@ -3288,7 +3287,7 @@ async def test_new_peak_txs_added(condition_and_error: tuple[ConditionOpcode, Er
assert mempool_manager.peak is not None
condition_height = mempool_manager.peak.height + 1
condition, expected_error = condition_and_error
- sb, sb_name, result = await generate_and_add_spendbundle(mempool_manager, [[condition, condition_height]])
+ _, sb_name, result = await generate_and_add_spendbundle(mempool_manager, [[condition, condition_height]])
_, status, error = result
assert status == MempoolInclusionStatus.PENDING
assert error == expected_error
@@ -3299,14 +3298,13 @@ async def test_new_peak_txs_added(condition_and_error: tuple[ConditionOpcode, Er
create_test_block_record(height=uint32(condition_height)), spent_coins
)
# We're not there yet (needs to be higher, not equal)
+ assert new_peak_info.spend_bundle_ids == []
assert mempool_manager.get_mempool_item(sb_name, include_pending=False) is None
- assert new_peak_info.items == []
else:
spent_coins = None
new_peak_info = await mempool_manager.new_peak(
create_test_block_record(height=uint32(condition_height + 1)), spent_coins
)
# The item gets retried successfully now
- mi = mempool_manager.get_mempool_item(sb_name, include_pending=False)
- assert mi is not None
- assert new_peak_info.items == [NewPeakItem(sb_name, sb, mi.conds)]
+ assert new_peak_info.spend_bundle_ids == [sb_name]
+ assert mempool_manager.get_mempool_item(sb_name, include_pending=False) is not None
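
With this change the test no longer needs the spend bundle or its parsed conditions in hand: once new_peak reports an ID in spend_bundle_ids, everything else can be recovered through get_mempool_item. A minimal sketch of that pattern, assuming the helpers already used in this module (generate_and_add_spendbundle, create_test_block_record) and an initialized mempool_manager:

_, sb_name, _ = await generate_and_add_spendbundle(mempool_manager, [[condition, condition_height]])
spent_coins = None
new_peak_info = await mempool_manager.new_peak(
    create_test_block_record(height=uint32(condition_height + 1)), spent_coins
)
# The return value now carries only transaction IDs...
assert new_peak_info.spend_bundle_ids == [sb_name]
# ...and the full item (spend bundle, conditions, fee, etc.) is looked up on demand.
mi = mempool_manager.get_mempool_item(sb_name, include_pending=False)
assert mi is not None
assert mi.conds is not None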
16 changes: 9 additions & 7 deletions chia/full_node/full_node.py
@@ -58,7 +58,7 @@
from chia.full_node.hint_management import get_hints_and_subscription_coin_ids
from chia.full_node.hint_store import HintStore
from chia.full_node.mempool import MempoolRemoveInfo
- from chia.full_node.mempool_manager import MempoolManager, NewPeakItem
+ from chia.full_node.mempool_manager import MempoolManager
from chia.full_node.subscriptions import PeerSubscriptions, peers_for_spend_bundle
from chia.full_node.sync_store import Peak, SyncStore
from chia.full_node.tx_processing_queue import TransactionQueue, TransactionQueueEntry
@@ -99,7 +99,8 @@
# This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2
@dataclasses.dataclass
class PeakPostProcessingResult:
- mempool_peak_result: list[NewPeakItem] # The new items from calling MempoolManager.new_peak
+ # The added transaction IDs from calling MempoolManager.new_peak
+ mempool_peak_added_tx_ids: list[bytes32]
mempool_removals: list[MempoolRemoveInfo] # The removed mempool items from calling MempoolManager.new_peak
fns_peak_result: FullNodeStorePeakResult # The result of calling FullNodeStore.new_peak
hints: list[tuple[bytes32, bytes]] # The hints added to the DB
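
Assembled from this hunk, the reworked result type reads roughly as follows; only the fields visible in the diff are listed, so any remaining fields (for example the signage_points used later in peak_post_processing_2) are elided:

@dataclasses.dataclass
class PeakPostProcessingResult:
    # The added transaction IDs from calling MempoolManager.new_peak
    mempool_peak_added_tx_ids: list[bytes32]
    # The removed mempool items from calling MempoolManager.new_peak
    mempool_removals: list[MempoolRemoveInfo]
    # The result of calling FullNodeStore.new_peak
    fns_peak_result: FullNodeStorePeakResult
    # The hints added to the DB
    hints: list[tuple[bytes32, bytes]]
    ...  # further fields continue past the visible hunk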
@@ -321,7 +322,8 @@ async def manage(self) -> AsyncIterator[None]:
)
async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
pending_tx = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), None)
- assert len(pending_tx.items) == 0 # no pending transactions when starting up
+ # No pending transactions when starting up
+ assert len(pending_tx.spend_bundle_ids) == 0

full_peak: Optional[FullBlock] = await self.blockchain.get_full_peak()
assert full_peak is not None
@@ -1939,7 +1941,7 @@ async def peak_post_processing(
mempool_new_peak_result = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), spent_coins)

return PeakPostProcessingResult(
- mempool_new_peak_result.items,
+ mempool_new_peak_result.spend_bundle_ids,
mempool_new_peak_result.removals,
fns_peak_result,
hints_to_add,
@@ -1961,9 +1963,9 @@ async def peak_post_processing_2(
record = state_change_summary.peak
for signage_point in ppp_result.signage_points:
await self.signage_point_post_processing(*signage_point)
- for new_peak_item in ppp_result.mempool_peak_result:
- self.log.debug(f"Added transaction to mempool: {new_peak_item.transaction_id}")
- mempool_item = self.mempool_manager.get_mempool_item(new_peak_item.transaction_id)
+ for transaction_id in ppp_result.mempool_peak_added_tx_ids:
+ self.log.debug(f"Added transaction to mempool: {transaction_id}")
+ mempool_item = self.mempool_manager.get_mempool_item(transaction_id)
assert mempool_item is not None
await self.broadcast_added_tx(mempool_item)

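The design choice running through the full_node.py changes is that new_peak results are now passed around as bare IDs and re-resolved from the mempool only when the full item is actually needed, as the broadcast loop above does. A standalone sketch of that pattern follows; FakeMempool and FakeItem are illustrative stand-ins, not the project's API:

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class FakeItem:
    # Stand-in for the real MempoolItem: the mempool keeps the heavyweight data.
    name: bytes
    fee: int


@dataclass
class FakeMempool:
    # Stand-in for the lookup side of MempoolManager.
    items: dict[bytes, FakeItem] = field(default_factory=dict)

    def get_mempool_item(self, name: bytes) -> Optional[FakeItem]:
        return self.items.get(name)


def new_peak(mempool: FakeMempool, retried: list[FakeItem]) -> list[bytes]:
    # Report only the IDs of items that were (re)added, not the items themselves.
    added: list[bytes] = []
    for item in retried:
        mempool.items[item.name] = item
        added.append(item.name)
    return added


mempool = FakeMempool()
for tx_id in new_peak(mempool, [FakeItem(b"tx1", 50)]):
    # Consumers resolve the ID back to the full item on demand.
    item = mempool.get_mempool_item(tx_id)
    assert item is not None and item.fee == 50
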
11 changes: 2 additions & 9 deletions chia/full_node/mempool_manager.py
@@ -133,17 +133,10 @@ class SpendBundleAddInfo:

@dataclass
class NewPeakInfo:
- items: list[NewPeakItem]
+ spend_bundle_ids: list[bytes32]
removals: list[MempoolRemoveInfo]


- @dataclass
- class NewPeakItem:
- transaction_id: bytes32
- spend_bundle: SpendBundle
- conds: SpendBundleConditions


# For block overhead cost calculation
QUOTE_BYTES = 2
QUOTE_EXECUTION_COST = 20
@@ -992,7 +985,7 @@ async def local_get_coin_records(names: Collection[bytes32]) -> list[CoinRecord]
lineage_cache.get_unspent_lineage_info,
)
if info.status == MempoolInclusionStatus.SUCCESS:
- txs_added.append(NewPeakItem(item.spend_bundle_name, item.spend_bundle, item.conds))
+ txs_added.append(item.spend_bundle_name)
mempool_item_removals.extend(info.removals)
log.info(
f"Size of mempool: {self.mempool.size()} spends, "
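Taken together with the full_node.py changes, the net effect on the return type is that callers which previously destructured a NewPeakItem now receive bare IDs and go through get_mempool_item for anything else. A side-by-side sketch, reconstructed from the hunk above (bytes32, SpendBundle, SpendBundleConditions, and MempoolRemoveInfo are the project's existing types):

# Before: new_peak returned fully materialized items.
@dataclass
class NewPeakItem:
    transaction_id: bytes32
    spend_bundle: SpendBundle
    conds: SpendBundleConditions


@dataclass
class NewPeakInfo:
    items: list[NewPeakItem]
    removals: list[MempoolRemoveInfo]


# After: new_peak reports only the IDs of the transactions it (re)added;
# as shown above, txs_added now collects item.spend_bundle_name (a bytes32).
@dataclass
class NewPeakInfo:
    spend_bundle_ids: list[bytes32]
    removals: list[MempoolRemoveInfo]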