repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/default_root.py | flax/util/default_root.py | from __future__ import annotations
import os
from pathlib import Path
# Root of the Flax data directory; overridable via the FLAX_ROOT env var.
DEFAULT_ROOT_PATH = Path(os.path.expanduser(os.getenv("FLAX_ROOT", "~/.flax/mainnet"))).resolve()
# Directory holding the keyring; overridable via the FLAX_KEYS_ROOT env var.
DEFAULT_KEYS_ROOT_PATH = Path(os.path.expanduser(os.getenv("FLAX_KEYS_ROOT", "~/.flax_keys"))).resolve()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/__init__.py | flax/util/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/lru_cache.py | flax/util/lru_cache.py | from __future__ import annotations
from collections import OrderedDict
from typing import Generic, Optional, TypeVar
K = TypeVar("K")
V = TypeVar("V")


class LRUCache(Generic[K, V]):
    """A fixed-capacity least-recently-used cache backed by an OrderedDict.

    The ordering encodes recency: oldest entry first, newest last. Once more
    than `capacity` entries are stored, the oldest is evicted on insert.
    """

    def __init__(self, capacity: int):
        self.cache: OrderedDict[K, V] = OrderedDict()
        self.capacity = capacity

    def get(self, key: K) -> Optional[V]:
        """Return the value for `key` (marking it most recently used), or None."""
        try:
            self.cache.move_to_end(key)
        except KeyError:
            return None
        return self.cache[key]

    def put(self, key: K, value: V) -> None:
        """Insert/overwrite `key`, evicting the least recently used entry if full."""
        self.cache[key] = value
        self.cache.move_to_end(key)
        if len(self.cache) > self.capacity:
            self.cache.popitem(last=False)

    def remove(self, key: K) -> None:
        """Drop `key` from the cache; raises KeyError when absent."""
        del self.cache[key]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/cached_bls.py | flax/util/cached_bls.py | from __future__ import annotations
import functools
from typing import Dict, List, Optional, Sequence
from blspy import AugSchemeMPL, G1Element, G2Element, GTElement
from flax.types.blockchain_format.sized_bytes import bytes32, bytes48
from flax.util.hash import std_hash
from flax.util.lru_cache import LRUCache
def get_pairings(
    cache: LRUCache[bytes32, GTElement], pks: List[bytes48], msgs: Sequence[bytes], force_cache: bool
) -> List[GTElement]:
    """Return the pairing for each (pk, msg) pair, computing and caching misses.

    Cache keys are std_hash(pk + msg). Returns an empty list when force_cache
    is False and more than half the pairings are missing, signalling the caller
    to fall back to plain aggregate verification instead of filling the cache.
    """
    pairings: List[Optional[GTElement]] = []
    missing_count: int = 0
    for pk, msg in zip(pks, msgs):
        aug_msg: bytes = pk + msg
        h: bytes32 = std_hash(aug_msg)
        pairing: Optional[GTElement] = cache.get(h)
        if not force_cache and pairing is None:
            missing_count += 1
            # Heuristic to avoid more expensive sig validation with pairing
            # cache when it's empty and cached pairings won't be useful later
            # (e.g. while syncing)
            if missing_count > len(pks) // 2:
                return []
        pairings.append(pairing)
    # G1Element.from_bytes can be expensive due to subgroup check, so we avoid recomputing it with this cache
    pk_bytes_to_g1: Dict[bytes48, G1Element] = {}
    for i, pairing in enumerate(pairings):
        if pairing is None:
            aug_msg = pks[i] + msgs[i]
            aug_hash: G2Element = AugSchemeMPL.g2_from_message(aug_msg)
            pk_parsed: Optional[G1Element] = pk_bytes_to_g1.get(pks[i])
            if pk_parsed is None:
                # In this case, we use from_bytes instead of from_bytes_unchecked, because we will not be using
                # the bls_signatures aggregate_verify method which performs the subgroup checks
                pk_parsed = G1Element.from_bytes(pks[i])
                pk_bytes_to_g1[pks[i]] = pk_parsed
            pairing = pk_parsed.pair(aug_hash)
            h = std_hash(aug_msg)
            cache.put(h, pairing)
            pairings[i] = pairing
    return pairings
# Increasing this number will increase RAM usage, but decrease BLS validation time for blocks and unfinished blocks.
LOCAL_CACHE: LRUCache[bytes32, GTElement] = LRUCache(50000)
def aggregate_verify(
    pks: List[bytes48],
    msgs: Sequence[bytes],
    sig: G2Element,
    force_cache: bool = False,
    cache: LRUCache[bytes32, GTElement] = LOCAL_CACHE,
) -> bool:
    """Verify an aggregate BLS signature, reusing cached pairings when possible.

    When get_pairings declines to use the cache (empty list), fall back to
    AugSchemeMPL.aggregate_verify. Otherwise compare the product of the
    per-message pairings against the pairing of the signature with the G1
    generator.
    """
    pairings: List[GTElement] = get_pairings(cache, pks, msgs, force_cache)
    if len(pairings) == 0:
        # Using AugSchemeMPL.aggregate_verify, so it's safe to use from_bytes_unchecked
        pks_objects: List[G1Element] = [G1Element.from_bytes_unchecked(pk) for pk in pks]
        res: bool = AugSchemeMPL.aggregate_verify(pks_objects, msgs, sig)
        return res
    pairings_prod: GTElement = functools.reduce(GTElement.__mul__, pairings)
    res = pairings_prod == sig.pair(G1Element.generator())
    return res
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/inline_executor.py | flax/util/inline_executor.py | from __future__ import annotations
from concurrent.futures import Executor, Future
from typing import Callable, TypeVar
_T = TypeVar("_T")


class InlineExecutor(Executor):
    """An Executor that runs each submitted callable synchronously on the caller's thread.

    The result (or raised exception) is captured into an already-completed Future.
    """

    _closing: bool = False

    def submit(self, fn: Callable[..., _T], *args, **kwargs) -> Future[_T]:  # type: ignore
        """Run `fn(*args, **kwargs)` immediately and return a finished Future."""
        if self._closing:
            raise RuntimeError("executor shutting down")
        future: Future[_T] = Future()
        try:
            result = fn(*args, **kwargs)
        except BaseException as e:  # lgtm[py/catch-base-exception]
            future.set_exception(e)
        else:
            future.set_result(result)
        return future

    def close(self) -> None:
        """Stop accepting work; subsequent submit() calls raise RuntimeError."""
        self._closing = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/ints.py | flax/util/ints.py | from __future__ import annotations
from flax.util.struct_stream import StructStream, parse_metadata_from_name
# Fixed-width integer types. For each class below, the struct pack format,
# byte size, bit width and signedness are derived from the class NAME by the
# parse_metadata_from_name decorator (e.g. "uint32" -> 4 bytes, unsigned).
@parse_metadata_from_name
class int8(StructStream):
    pass


@parse_metadata_from_name
class uint8(StructStream):
    pass


@parse_metadata_from_name
class int16(StructStream):
    pass


@parse_metadata_from_name
class uint16(StructStream):
    pass


@parse_metadata_from_name
class int32(StructStream):
    pass


@parse_metadata_from_name
class uint32(StructStream):
    pass


@parse_metadata_from_name
class int64(StructStream):
    pass


@parse_metadata_from_name
class uint64(StructStream):
    pass


@parse_metadata_from_name
class uint128(StructStream):
    pass


class int512(StructStream):
    # No struct format exists for a 512-bit value; serialized manually.
    PACK = None

    # Uses 65 bytes to fit in the sign bit
    SIZE = 65
    BITS = 512
    SIGNED = True

    # note that the boundaries for int512 is not what you might expect. We
    # encode these with one extra byte, but only allow a range of
    # [-INT512_MAX, INT512_MAX]
    MAXIMUM_EXCLUSIVE = 2**BITS
    MINIMUM = -(2**BITS) + 1
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/db_version.py | flax/util/db_version.py | from __future__ import annotations
import sqlite3
import aiosqlite
async def lookup_db_version(db: aiosqlite.Connection) -> int:
    """Return the schema version recorded in the database_version table.

    Returns 2 only when the first row's first column is exactly 2; any other
    content — or a missing table — is treated as version 1.
    """
    try:
        cursor = await db.execute("SELECT * from database_version")
        row = await cursor.fetchone()
        if row is not None and row[0] == 2:
            return 2
        else:
            return 1
    except aiosqlite.OperationalError:
        # expects OperationalError('no such table: database_version')
        return 1
async def set_db_version_async(db: aiosqlite.Connection, version: int) -> None:
    """Create the database_version table and record `version` (async variant).

    Raises if the table already exists.
    """
    await db.execute("CREATE TABLE database_version(version int)")
    await db.execute("INSERT INTO database_version VALUES (?)", (version,))
    await db.commit()
def set_db_version(db: sqlite3.Connection, version: int) -> None:
    """Create the database_version table and record `version` in it.

    Raises sqlite3.OperationalError if the table already exists.
    """
    create_sql = "CREATE TABLE database_version(version int)"
    insert_sql = "INSERT INTO database_version VALUES (?)"
    db.execute(create_sql)
    db.execute(insert_sql, (version,))
    db.commit()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/prev_transaction_block.py | flax/util/prev_transaction_block.py | from __future__ import annotations
from typing import Tuple
from flax.consensus.block_record import BlockRecord
from flax.consensus.blockchain_interface import BlockchainInterface
from flax.util.ints import uint128
def get_prev_transaction_block(
    curr: BlockRecord,
    blocks: BlockchainInterface,
    total_iters_sp: uint128,
) -> Tuple[bool, BlockRecord]:
    """Walk back from `curr` to the most recent transaction block.

    Returns (is_transaction_block, prev_transaction_block). The flag is True
    only when the signage point's total iterations exceed those of the found
    transaction block; in that case the found block is returned, otherwise
    the original `curr` is returned unchanged.
    """
    prev_transaction_block = curr
    while not curr.is_transaction_block:
        curr = blocks.block_record(curr.prev_hash)
    if total_iters_sp > curr.total_iters:
        prev_transaction_block = curr
        is_transaction_block = True
    else:
        is_transaction_block = False
    return is_transaction_block, prev_transaction_block
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/significant_bits.py | flax/util/significant_bits.py | from __future__ import annotations
def truncate_to_significant_bits(input_x: int, num_significant_bits: int) -> int:
    """
    Truncates the number such that only the top num_significant_bits contain 1s.
    and the rest of the number is 0s (in binary). Ignores decimals and leading
    zeroes. For example, -0b011110101 and 2, returns -0b11000000.
    """
    magnitude = abs(input_x)
    total_bits = magnitude.bit_length()
    if num_significant_bits > total_bits:
        return input_x
    # Zero out everything below the top num_significant_bits by shifting
    # down and back up, preserving the sign of the input.
    drop_bits = total_bits - num_significant_bits
    truncated = (magnitude >> drop_bits) << drop_bits
    return -truncated if input_x < 0 else truncated
def count_significant_bits(input_x: int) -> int:
    """
    Counts the number of significant bits of an integer, ignoring negative signs
    and leading zeroes. For example, for -0b000110010000, returns 5.
    """
    if input_x == 0:
        return 0
    # Distance from the highest set bit down to the lowest set bit, inclusive.
    # (x & -x) isolates the lowest set bit; bit_length ignores the sign.
    lowest_set_bit = (input_x & -input_x).bit_length() - 1
    return input_x.bit_length() - lowest_set_bit
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/block_cache.py | flax/util/block_cache.py | from __future__ import annotations
import logging
from typing import Dict, List, Optional
from flax.consensus.block_record import BlockRecord
from flax.consensus.blockchain_interface import BlockchainInterface
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from flax.types.header_block import HeaderBlock
from flax.types.weight_proof import SubEpochChallengeSegment, SubEpochSegments
from flax.util.ints import uint32
class BlockCache(BlockchainInterface):
    """An in-memory BlockchainInterface backed by plain dictionaries.

    Provides a lightweight, pre-populated view over block records, headers,
    the height->hash mapping and sub-epoch summaries without a database.
    The async methods exist to satisfy the interface; they serve from memory.
    """

    def __init__(
        self,
        blocks: Dict[bytes32, BlockRecord],
        headers: Optional[Dict[bytes32, HeaderBlock]] = None,
        height_to_hash: Optional[Dict[uint32, bytes32]] = None,
        sub_epoch_summaries: Optional[Dict[uint32, SubEpochSummary]] = None,
    ):
        if sub_epoch_summaries is None:
            sub_epoch_summaries = {}
        if height_to_hash is None:
            height_to_hash = {}
        if headers is None:
            headers = {}
        self._block_records = blocks
        self._headers = headers
        self._height_to_hash = height_to_hash
        self._sub_epoch_summaries = sub_epoch_summaries
        self._sub_epoch_segments: Dict[bytes32, SubEpochSegments] = {}
        self.log = logging.getLogger(__name__)

    def block_record(self, header_hash: bytes32) -> BlockRecord:
        # Raises KeyError when the hash is not cached.
        return self._block_records[header_hash]

    def height_to_block_record(self, height: uint32, check_db: bool = False) -> BlockRecord:
        # Precondition: height is < peak height
        header_hash: Optional[bytes32] = self.height_to_hash(height)
        assert header_hash is not None
        return self.block_record(header_hash)

    def get_ses_heights(self) -> List[uint32]:
        """Return the heights that have sub-epoch summaries, in ascending order."""
        return sorted(self._sub_epoch_summaries.keys())

    def get_ses(self, height: uint32) -> SubEpochSummary:
        return self._sub_epoch_summaries[height]

    def height_to_hash(self, height: uint32) -> Optional[bytes32]:
        """Return the header hash at `height`, or None (with a warning) if unknown."""
        if height not in self._height_to_hash:
            self.log.warning(f"could not find height in cache {height}")
            return None
        return self._height_to_hash[height]

    def contains_block(self, header_hash: bytes32) -> bool:
        return header_hash in self._block_records

    def contains_height(self, height: uint32) -> bool:
        return height in self._height_to_hash

    async def get_block_records_in_range(self, start: int, stop: int) -> Dict[bytes32, BlockRecord]:
        # Range arguments are ignored; the whole cache is returned.
        return self._block_records

    async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]:
        block_records: List[BlockRecord] = []
        for height in heights:
            block_records.append(self.height_to_block_record(height))
        return block_records

    async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]:
        # There is no database behind this cache; served from memory.
        return self._block_records[header_hash]

    def remove_block_record(self, header_hash: bytes32):
        del self._block_records[header_hash]

    def add_block_record(self, block: BlockRecord):
        self._block_records[block.header_hash] = block

    async def get_header_blocks_in_range(
        self, start: int, stop: int, tx_filter: bool = True
    ) -> Dict[bytes32, HeaderBlock]:
        # Range and filter arguments are ignored; all headers are returned.
        return self._headers

    async def persist_sub_epoch_challenge_segments(
        self, sub_epoch_summary_hash: bytes32, segments: List[SubEpochChallengeSegment]
    ):
        self._sub_epoch_segments[sub_epoch_summary_hash] = SubEpochSegments(segments)

    async def get_sub_epoch_challenge_segments(
        self,
        sub_epoch_summary_hash: bytes32,
    ) -> Optional[List[SubEpochChallengeSegment]]:
        segments = self._sub_epoch_segments.get(sub_epoch_summary_hash)
        if segments is None:
            return None
        return segments.challenge_segments
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/log_exceptions.py | flax/util/log_exceptions.py | from contextlib import contextmanager
import logging
import traceback
@contextmanager
def log_exceptions(log: logging.Logger, *, consume: bool = False):
    """Context manager that logs any Exception raised inside its body.

    The exception is re-raised unless consume=True, in which case it is
    swallowed after logging. BaseExceptions (e.g. KeyboardInterrupt) pass
    through untouched.
    """
    try:
        yield
    except Exception as e:
        log.error(f"Caught Exception: {e}. Traceback: {traceback.format_exc()}")
        if consume:
            return
        raise
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/safe_cancel_task.py | flax/util/safe_cancel_task.py | from __future__ import annotations
import asyncio
import logging
from typing import Optional
def cancel_task_safe(task: Optional[asyncio.Task], log: Optional[logging.Logger] = None):
    """Cancel `task` if one was given, logging (never raising) any failure.

    When `log` is None a cancellation failure is silently ignored.
    """
    if task is None:
        return
    try:
        task.cancel()
    except Exception as e:
        if log is not None:
            log.error(f"Error while canceling task.{e} {task}")
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/ws_message.py | flax/util/ws_message.py | from secrets import token_bytes
from typing import Any, Dict, Optional
from flax.util.json_util import dict_to_json_str
from typing_extensions import TypedDict
# Messages must follow this format
# Message = { "command" "command_name",
# "data" : {...},
# "request_id": "bytes_32",
# "destination": "service_name",
# "origin": "service_name"
# }
class WsRpcMessage(TypedDict):
    """Shape of a websocket RPC message exchanged between services (see format above)."""

    command: str  # name of the RPC command
    ack: bool  # True when this message is a response (set by format_response)
    data: Dict[str, Any]  # command-specific payload
    request_id: str  # hex-encoded random id correlating request and response
    destination: str  # receiving service name
    origin: str  # originating service name
def format_response(incoming_msg: WsRpcMessage, response_data: Dict[str, Any]) -> str:
    """
    Formats the response into standard format.
    """
    # Echo command and request_id, swap origin/destination, and mark as an ack.
    response = dict(
        command=incoming_msg["command"],
        ack=True,
        data=response_data,
        request_id=incoming_msg["request_id"],
        destination=incoming_msg["origin"],
        origin=incoming_msg["destination"],
    )
    return dict_to_json_str(response)
def create_payload(command: str, data: Dict[str, Any], origin: str, destination: str) -> str:
    """Build a WsRpcMessage for `command` and serialize it to a JSON string."""
    return dict_to_json_str(create_payload_dict(command, data, origin, destination))
def create_payload_dict(command: str, data: Optional[Dict[str, Any]], origin: str, destination: str) -> WsRpcMessage:
    """Build an unacknowledged WsRpcMessage with a freshly generated request_id."""
    if data is None:
        data = {}
    return WsRpcMessage(
        command=command,
        ack=False,
        data=data,
        # token_bytes() defaults to a reasonable random length; hex-encoded for JSON transport
        request_id=token_bytes().hex(),
        destination=destination,
        origin=origin,
    )
def pong() -> Dict[str, Any]:
    """Return the standard successful ping response payload."""
    return {"success": True}
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/full_block_utils.py | flax/util/full_block_utils.py | from __future__ import annotations
import io
from dataclasses import dataclass
from typing import Callable, List, Optional, Tuple
from blspy import G1Element, G2Element
from chia_rs import serialized_length
from chiabip158 import PyBIP158
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.foliage import TransactionsInfo
from flax.types.blockchain_format.program import SerializedProgram
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint32
def skip_list(buf: memoryview, skip_item: Callable[[memoryview], memoryview]) -> memoryview:
    """Skip a serialized list: a 32-bit big-endian count followed by that many items."""
    count = int.from_bytes(buf[:4], "big", signed=False)
    remaining = buf[4:]
    for _ in range(count):
        remaining = skip_item(remaining)
    return remaining
def skip_bytes(buf: memoryview) -> memoryview:
    """Skip a length-prefixed byte string: 32-bit big-endian length, then payload."""
    length = int.from_bytes(buf[:4], "big", signed=False)
    assert length >= 0
    return buf[4 + length :]
def skip_optional(buf: memoryview, skip_item: Callable[[memoryview], memoryview]) -> memoryview:
    """Skip an optional field: a presence byte (0 or 1) followed by the item if present."""
    present = buf[0]
    if present == 0:
        return buf[1:]
    assert present == 1
    return skip_item(buf[1:])
# Fixed-size field skippers: each advances the view past one serialized field.
def skip_bytes32(buf: memoryview) -> memoryview:
    return buf[32:]


def skip_uint32(buf: memoryview) -> memoryview:
    return buf[4:]


def skip_uint64(buf: memoryview) -> memoryview:
    return buf[8:]


def skip_uint128(buf: memoryview) -> memoryview:
    return buf[16:]


def skip_uint8(buf: memoryview) -> memoryview:
    return buf[1:]


def skip_bool(buf: memoryview) -> memoryview:
    # A serialized bool must be exactly 0 or 1.
    assert buf[0] in [0, 1]
    return buf[1:]
# def skip_class_group_element(buf: memoryview) -> memoryview:
#     return buf[100:]  # bytes100


def skip_vdf_info(buf: memoryview) -> memoryview:
    # Single slice over three fixed-size fields, equivalent to:
    # buf = skip_bytes32(buf)
    # buf = skip_uint64(buf)
    # return skip_class_group_element(buf)
    return buf[32 + 8 + 100 :]


def skip_vdf_proof(buf: memoryview) -> memoryview:
    # VDFProof: one fixed byte, a variable-length witness, and a bool.
    buf = skip_uint8(buf)  # witness_type
    buf = skip_bytes(buf)  # witness
    return skip_bool(buf)  # normalized_to_identity
def skip_challenge_chain_sub_slot(buf: memoryview) -> memoryview:
    # ChallengeChainSubSlot: end-of-slot VDF info plus four optional fields.
    buf = skip_vdf_info(buf)
    buf = skip_optional(buf, skip_bytes32)  # infused challenge chain sub slot hash
    buf = skip_optional(buf, skip_bytes32)  # subepoch_summary_hash
    buf = skip_optional(buf, skip_uint64)  # new_sub_slot_iters
    return skip_optional(buf, skip_uint64)  # new_difficulty


def skip_infused_challenge_chain(buf: memoryview) -> memoryview:
    return skip_vdf_info(buf)  # infused_challenge_chain_end_of_slot_vdf


def skip_reward_chain_sub_slot(buf: memoryview) -> memoryview:
    buf = skip_vdf_info(buf)  # end_of_slot_vdf
    buf = skip_bytes32(buf)  # challenge_chain_sub_slot_hash
    buf = skip_optional(buf, skip_bytes32)  # infused_challenge_chain_sub_slot_hash
    return skip_uint8(buf)  # trailing uint8 field (presumably the deficit -- confirm against the type)


def skip_sub_slot_proofs(buf: memoryview) -> memoryview:
    buf = skip_vdf_proof(buf)  # challenge_chain_slot_proof
    buf = skip_optional(buf, skip_vdf_proof)  # infused_challenge_chain_slot_proof
    return skip_vdf_proof(buf)  # reward_chain_slot_proof


def skip_end_of_sub_slot_bundle(buf: memoryview) -> memoryview:
    # EndOfSubSlotBundle = challenge chain + optional infused chain
    # + reward chain + the slot proofs.
    buf = skip_challenge_chain_sub_slot(buf)
    buf = skip_optional(buf, skip_infused_challenge_chain)
    buf = skip_reward_chain_sub_slot(buf)
    return skip_sub_slot_proofs(buf)
def skip_g1_element(buf: memoryview) -> memoryview:
    # Fixed-size BLS G1 element (public key); size taken from the blspy constant.
    return buf[G1Element.SIZE :]


def skip_g2_element(buf: memoryview) -> memoryview:
    # Fixed-size BLS G2 element (signature); size taken from the blspy constant.
    return buf[G2Element.SIZE :]
def skip_proof_of_space(buf: memoryview) -> memoryview:
    # ProofOfSpace: fixed fields plus a variable-length proof blob.
    buf = skip_bytes32(buf)  # challenge
    buf = skip_optional(buf, skip_g1_element)  # pool_public_key
    buf = skip_optional(buf, skip_bytes32)  # pool_contract_puzzle_hash
    buf = skip_g1_element(buf)  # plot_public_key
    buf = skip_uint8(buf)  # size
    return skip_bytes(buf)  # proof


def skip_reward_chain_block(buf: memoryview) -> memoryview:
    # RewardChainBlock, field by field, in serialization order.
    buf = skip_uint128(buf)  # weight
    buf = skip_uint32(buf)  # height
    buf = skip_uint128(buf)  # total_iters
    buf = skip_uint8(buf)  # signage_point_index
    buf = skip_bytes32(buf)  # pos_ss_cc_challenge_hash
    buf = skip_proof_of_space(buf)  # proof_of_space
    buf = skip_optional(buf, skip_vdf_info)  # challenge_chain_sp_vdf
    buf = skip_g2_element(buf)  # challenge_chain_sp_signature
    buf = skip_vdf_info(buf)  # challenge_chain_ip_vdf
    buf = skip_optional(buf, skip_vdf_info)  # reward_chain_sp_vdf
    buf = skip_g2_element(buf)  # reward_chain_sp_signature
    buf = skip_vdf_info(buf)  # reward_chain_ip_vdf
    buf = skip_optional(buf, skip_vdf_info)  # infused_challenge_chain_ip_vdf
    return skip_bool(buf)  # is_transaction_block
def skip_pool_target(buf: memoryview) -> memoryview:
    # Single slice over two fixed-size fields, equivalent to:
    # buf = skip_bytes32(buf)  # puzzle_hash
    # return skip_uint32(buf)  # max_height
    return buf[32 + 4 :]


def skip_foliage_block_data(buf: memoryview) -> memoryview:
    buf = skip_bytes32(buf)  # unfinished_reward_block_hash
    buf = skip_pool_target(buf)  # pool_target
    buf = skip_optional(buf, skip_g2_element)  # pool_signature
    buf = skip_bytes32(buf)  # farmer_reward_puzzle_hash
    return skip_bytes32(buf)  # extension_data


def skip_foliage(buf: memoryview) -> memoryview:
    buf = skip_bytes32(buf)  # prev_block_hash
    buf = skip_bytes32(buf)  # reward_block_hash
    buf = skip_foliage_block_data(buf)  # foliage_block_data
    buf = skip_g2_element(buf)  # foliage_block_data_signature
    buf = skip_optional(buf, skip_bytes32)  # foliage_transaction_block_hash
    return skip_optional(buf, skip_g2_element)  # foliage_transaction_block_signature
def prev_hash_from_foliage(buf: memoryview) -> Tuple[memoryview, bytes32]:
    """Skip the Foliage structure while also extracting its prev_block_hash field.

    Returns (remaining_buffer, prev_block_hash). Mirrors skip_foliage().
    """
    prev_hash = buf[:32]  # prev_block_hash
    buf = skip_bytes32(buf)  # prev_block_hash
    buf = skip_bytes32(buf)  # reward_block_hash
    buf = skip_foliage_block_data(buf)  # foliage_block_data
    buf = skip_g2_element(buf)  # foliage_block_data_signature
    buf = skip_optional(buf, skip_bytes32)  # foliage_transaction_block_hash
    return skip_optional(buf, skip_g2_element), bytes32(prev_hash)  # foliage_transaction_block_signature
def skip_foliage_transaction_block(buf: memoryview) -> memoryview:
    # Six fixed-size fields skipped in one slice, equivalent to:
    # buf = skip_bytes32(buf)  # prev_transaction_block_hash
    # buf = skip_uint64(buf)  # timestamp
    # buf = skip_bytes32(buf)  # filter_hash
    # buf = skip_bytes32(buf)  # additions_root
    # buf = skip_bytes32(buf)  # removals_root
    # return skip_bytes32(buf)  # transactions_info_hash
    return buf[32 + 8 + 32 + 32 + 32 + 32 :]


def skip_coin(buf: memoryview) -> memoryview:
    # buf = skip_bytes32(buf)  # parent_coin_info
    # buf = skip_bytes32(buf)  # puzzle_hash
    # return skip_uint64(buf)  # amount
    return buf[32 + 32 + 8 :]


def skip_transactions_info(buf: memoryview) -> memoryview:
    # Fixed-size prefix followed by the reward-claims coin list:
    # buf = skip_bytes32(buf)  # generator_root
    # buf = skip_bytes32(buf)  # generator_refs_root
    # buf = skip_g2_element(buf)  # aggregated_signature
    # buf = skip_uint64(buf)  # fees
    # buf = skip_uint64(buf)  # cost
    buf = buf[32 + 32 + G2Element.SIZE + 8 + 8 :]
    return skip_list(buf, skip_coin)
def generator_from_block(buf: memoryview) -> Optional[SerializedProgram]:
    """Extract just the transactions_generator from a serialized FullBlock.

    Every preceding field is skipped without being deserialized; returns None
    when the optional generator is absent.
    """
    buf = skip_list(buf, skip_end_of_sub_slot_bundle)  # finished_sub_slots
    buf = skip_reward_chain_block(buf)  # reward_chain_block
    buf = skip_optional(buf, skip_vdf_proof)  # challenge_chain_sp_proof
    buf = skip_vdf_proof(buf)  # challenge_chain_ip_proof
    buf = skip_optional(buf, skip_vdf_proof)  # reward_chain_sp_proof
    buf = skip_vdf_proof(buf)  # reward_chain_ip_proof
    buf = skip_optional(buf, skip_vdf_proof)  # infused_challenge_chain_ip_proof
    buf = skip_foliage(buf)  # foliage
    buf = skip_optional(buf, skip_foliage_transaction_block)  # foliage_transaction_block
    buf = skip_optional(buf, skip_transactions_info)  # transactions_info
    # this is the transactions_generator optional
    if buf[0] == 0:
        return None
    buf = buf[1:]
    # serialized_length computes the CLVM program's encoded size without parsing it fully
    length = serialized_length(buf)
    return SerializedProgram.from_bytes(bytes(buf[:length]))
# this implements the BlockInfo protocol
@dataclass(frozen=True)
class GeneratorBlockInfo:
    # Minimal block info extracted by block_info_from_block().
    prev_header_hash: bytes32
    transactions_generator: Optional[SerializedProgram]
    transactions_generator_ref_list: List[uint32]
def block_info_from_block(buf: memoryview) -> GeneratorBlockInfo:
    """Parse only the BlockInfo fields out of a serialized FullBlock.

    Skips all slots, proofs and VDFs, extracting the previous header hash from
    the foliage, the optional transactions generator, and the generator ref
    list — without deserializing the rest of the block.
    """
    buf = skip_list(buf, skip_end_of_sub_slot_bundle)  # finished_sub_slots
    buf = skip_reward_chain_block(buf)  # reward_chain_block
    buf = skip_optional(buf, skip_vdf_proof)  # challenge_chain_sp_proof
    buf = skip_vdf_proof(buf)  # challenge_chain_ip_proof
    buf = skip_optional(buf, skip_vdf_proof)  # reward_chain_sp_proof
    buf = skip_vdf_proof(buf)  # reward_chain_ip_proof
    buf = skip_optional(buf, skip_vdf_proof)  # infused_challenge_chain_ip_proof
    buf, prev_hash = prev_hash_from_foliage(buf)  # foliage
    buf = skip_optional(buf, skip_foliage_transaction_block)  # foliage_transaction_block
    buf = skip_optional(buf, skip_transactions_info)  # transactions_info
    # this is the transactions_generator optional
    generator = None
    if buf[0] != 0:
        buf = buf[1:]
        length = serialized_length(buf)
        generator = SerializedProgram.from_bytes(bytes(buf[:length]))
        buf = buf[length:]
    else:
        buf = buf[1:]
    refs_length = uint32.from_bytes(buf[:4])
    buf = buf[4:]
    refs: List[uint32] = []
    # Loop index is unused; we only consume refs_length entries in order.
    for _ in range(refs_length):
        refs.append(uint32.from_bytes(buf[:4]))
        buf = buf[4:]
    return GeneratorBlockInfo(prev_hash, generator, refs)
def header_block_from_block(
    buf: memoryview,
    request_filter: bool = True,
    tx_addition_coins: Optional[List[Coin]] = None,
    removal_names: Optional[List[bytes32]] = None,
) -> bytes:
    """Convert a serialized FullBlock into a serialized HeaderBlock.

    Copies everything up to (but excluding) transactions_info, appends a
    BIP158 transactions filter — recomputed from the block's reward claims
    plus `tx_addition_coins` / `removal_names` when `request_filter` is True,
    empty otherwise — and finally re-appends the optional transactions_info.

    The list parameters default to empty; None is accepted and normalized
    (fixes the mutable-default-argument pitfall of the previous signature
    while staying call-compatible).
    """
    if tx_addition_coins is None:
        tx_addition_coins = []
    if removal_names is None:
        removal_names = []
    buf2 = buf[:]
    buf2 = skip_list(buf2, skip_end_of_sub_slot_bundle)  # finished_sub_slots
    buf2 = skip_reward_chain_block(buf2)  # reward_chain_block
    buf2 = skip_optional(buf2, skip_vdf_proof)  # challenge_chain_sp_proof
    buf2 = skip_vdf_proof(buf2)  # challenge_chain_ip_proof
    buf2 = skip_optional(buf2, skip_vdf_proof)  # reward_chain_sp_proof
    buf2 = skip_vdf_proof(buf2)  # reward_chain_ip_proof
    buf2 = skip_optional(buf2, skip_vdf_proof)  # infused_challenge_chain_ip_proof
    buf2 = skip_foliage(buf2)  # foliage
    # Presence byte of foliage_transaction_block doubles as the tx-block flag.
    if buf2[0] == 0:
        is_transaction_block = False
    else:
        is_transaction_block = True
    buf2 = skip_optional(buf2, skip_foliage_transaction_block)  # foliage_transaction_block
    transactions_info: Optional[TransactionsInfo] = None
    # we make it optional even if it's not by default
    # if request_filter is True it will read extra bytes and populate it properly
    transactions_info_optional: bytes = bytes([0])
    encoded_filter = b"\x00"
    if request_filter:
        # this is the transactions_info optional
        if buf2[0] == 0:
            transactions_info_optional = bytes([0])
        else:
            transactions_info_optional = bytes([1])
            buf3 = buf2[1:]
            transactions_info = TransactionsInfo.parse(io.BytesIO(buf3))
        byte_array_tx: List[bytearray] = []
        if is_transaction_block and transactions_info:
            addition_coins = tx_addition_coins + list(transactions_info.reward_claims_incorporated)
            for coin in addition_coins:
                byte_array_tx.append(bytearray(coin.puzzle_hash))
            for name in removal_names:
                byte_array_tx.append(bytearray(name))
        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded_filter = bytes(bip158.GetEncoded())
    # Takes everything up to but not including transactions info
    header_block: bytes = bytes(buf[: (len(buf) - len(buf2))])
    # Transactions filter, potentially with added / removal coins
    header_block += (len(encoded_filter)).to_bytes(4, "big") + encoded_filter
    # Add transactions info
    header_block += transactions_info_optional
    if transactions_info is not None:
        header_block += bytes(transactions_info)
    return header_block
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/vdf_prover.py | flax/util/vdf_prover.py | from __future__ import annotations
from typing import Tuple
from chiavdf import prove
from flax.consensus.constants import ConsensusConstants
from flax.types.blockchain_format.classgroup import ClassgroupElement
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.vdf import VDFInfo, VDFProof
from flax.util.ints import uint8, uint64
def get_vdf_info_and_proof(
    constants: ConsensusConstants,
    vdf_input: ClassgroupElement,
    challenge_hash: bytes32,
    number_iters: uint64,
    normalized_to_identity: bool = False,
) -> Tuple[VDFInfo, VDFProof]:
    """Run the VDF prover and return the resulting (VDFInfo, VDFProof) pair.

    The prover output bytes are laid out as form_size bytes of classgroup
    output followed by form_size bytes of proof. witness_type is always 0 here.
    """
    form_size = ClassgroupElement.get_size(constants)
    result: bytes = prove(
        bytes(challenge_hash),
        vdf_input.data,
        constants.DISCRIMINANT_SIZE_BITS,
        number_iters,
    )
    output = ClassgroupElement.from_bytes(result[:form_size])
    proof_bytes = result[form_size : 2 * form_size]
    return VDFInfo(challenge_hash, number_iters, output), VDFProof(uint8(0), proof_bytes, normalized_to_identity)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/util/file_keyring.py | flax/util/file_keyring.py | from __future__ import annotations
import base64
import contextlib
import os
import shutil
import sys
import threading
from dataclasses import asdict, dataclass, field
from hashlib import pbkdf2_hmac
from pathlib import Path
from secrets import token_bytes
from typing import Any, Dict, Iterator, Optional, Union
import yaml
from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305 # pyright: reportMissingModuleSource=false
from typing_extensions import final
from watchdog.events import DirModifiedEvent, FileSystemEvent, FileSystemEventHandler
from watchdog.observers import Observer
from flax.util.default_root import DEFAULT_KEYS_ROOT_PATH
from flax.util.errors import KeychainFingerprintNotFound, KeychainLabelExists, KeychainLabelInvalid
from flax.util.lock import Lockfile
from flax.util.streamable import convert_byte_type
SALT_BYTES = 16  # PBKDF2 param
NONCE_BYTES = 12  # ChaCha20Poly1305 nonce is 12-bytes
HASH_ITERS = 100000  # PBKDF2 param
CHECKBYTES_VALUE = b"5f365b8292ee505b"  # Randomly generated
MAX_LABEL_LENGTH = 65
MAX_SUPPORTED_VERSION = 1  # Max supported file format version


def generate_nonce() -> bytes:
    """Return a fresh ChaCha20Poly1305 nonce; call once per encryption."""
    nonce = token_bytes(NONCE_BYTES)
    return nonce


def generate_salt() -> bytes:
    """Return a fresh PBKDF2 salt for deriving a key from the master passphrase."""
    salt = token_bytes(SALT_BYTES)
    return salt
def symmetric_key_from_passphrase(passphrase: str, salt: bytes) -> bytes:
    """Derive a symmetric key from the passphrase via PBKDF2-HMAC-SHA256 (HASH_ITERS rounds)."""
    return pbkdf2_hmac("sha256", passphrase.encode(), salt, HASH_ITERS)
def get_symmetric_key(salt: bytes) -> bytes:
    """Obtain the (possibly cached) master passphrase and derive the symmetric key.

    Exits the process with status 1 when the passphrase cannot be obtained.
    """
    # Function-level import; presumably avoids a circular dependency with flax.cmds -- TODO confirm
    from flax.cmds.passphrase_funcs import obtain_current_passphrase

    try:
        passphrase = obtain_current_passphrase(use_passphrase_cache=True)
    except Exception as e:
        print(f"Unable to unlock the keyring: {e}")
        sys.exit(1)
    return symmetric_key_from_passphrase(passphrase, salt)
def encrypt_data(input_data: bytes, key: bytes, nonce: bytes) -> bytes:
    """Encrypt CHECKBYTES_VALUE + input_data with ChaCha20Poly1305."""
    encryptor = ChaCha20Poly1305(key)
    data = encryptor.encrypt(nonce, CHECKBYTES_VALUE + input_data, None)
    return data


def decrypt_data(input_data: bytes, key: bytes, nonce: bytes) -> bytes:
    """Decrypt data produced by encrypt_data and strip the leading check bytes.

    Raises ValueError when the check bytes don't match (wrong key or corrupt data).
    """
    decryptor = ChaCha20Poly1305(key)
    output = decryptor.decrypt(nonce, input_data, None)
    if CHECKBYTES_VALUE != output[: len(CHECKBYTES_VALUE)]:
        raise ValueError("decryption failure (checkbytes)")
    return output[len(CHECKBYTES_VALUE) :]
def default_file_keyring_data() -> Dict[str, Any]:
    """Return the initial (empty) decrypted keyring payload."""
    empty_payload: Dict[str, Any] = {"keys": {}, "labels": {}}
    return empty_payload
def keyring_path_from_root(keys_root_path: Path) -> Path:
    """
    Returns the path to keyring.yaml
    """
    return keys_root_path / "keyring.yaml"
class FileKeyringVersionError(Exception):
    """Raised when keyring.yaml reports a format version newer than this build supports."""

    def __init__(self, actual_version: int) -> None:
        message = (
            f"Keyring format is unrecognized. Found version {actual_version}"
            f", expected a value <= {MAX_SUPPORTED_VERSION}. "
            "Please update to a newer version"
        )
        super().__init__(message)
@final
@dataclass
class FileKeyringContent:
    """
    FileKeyringContent represents the data structure of the keyring file. It contains an encrypted data part which is
    encrypted with a key derived from the user-provided master passphrase.
    """

    # The version of the whole keyring file structure
    version: int = 1
    # Random salt used as a PBKDF2 parameter. Updated when the master passphrase changes
    salt: bytes = field(default_factory=generate_salt)
    # Random nonce used as a ChaCha20Poly1305 parameter. Updated on each write to the file.
    nonce: bytes = field(default_factory=generate_nonce)
    # Encrypted and base64 encoded keyring data.
    # - The data with CHECKBYTES_VALUE prepended is encrypted using ChaCha20Poly1305.
    # - The symmetric key is derived from the master passphrase using PBKDF2.
    data: Optional[str] = None
    # An optional passphrase hint
    passphrase_hint: Optional[str] = None

    def __post_init__(self) -> None:
        # salt/nonce may arrive as hex strings when loaded from YAML; normalize to bytes.
        self.salt = convert_byte_type(bytes, self.salt)
        self.nonce = convert_byte_type(bytes, self.nonce)

    @classmethod
    def create_from_path(cls, path: Path) -> FileKeyringContent:
        """
        Load and parse the keyring file at `path`.

        Raises:
            FileNotFoundError: if the file does not exist.
            FileKeyringVersionError: if the on-disk version exceeds MAX_SUPPORTED_VERSION.
        """
        # Fix: use a context manager so the file handle is closed deterministically
        # (previously an anonymous open() was passed to yaml and the handle leaked).
        with open(path, "r") as f:
            loaded_dict = dict(yaml.safe_load(f))
        version = int(loaded_dict["version"])
        if version > MAX_SUPPORTED_VERSION:
            raise FileKeyringVersionError(version)
        return cls(**loaded_dict)

    def write_to_path(self, path: Path) -> None:
        """Persist the serialized content to `path` atomically with owner-only permissions."""
        os.makedirs(os.path.dirname(path), 0o700, True)
        # Write to a pid-suffixed temp file first so readers never observe a partial file.
        temp_path: Path = path.with_suffix("." + str(os.getpid()))
        with open(os.open(str(temp_path), os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o600), "w") as f:
            _ = yaml.safe_dump(self.to_dict(), f)
        try:
            os.replace(str(temp_path), path)
        except PermissionError:
            # os.replace can be rejected on some platforms/filesystems; fall back to a move.
            shutil.move(str(temp_path), str(path))

    def get_decrypted_data_dict(self, passphrase: str) -> Dict[str, Any]:
        """Decrypt `data` with the given passphrase; returns {} when nothing is stored."""
        if self.empty():
            return {}
        key = symmetric_key_from_passphrase(passphrase, self.salt)
        # safe_load round-trips the stored YAML scalar back to a str before base64-decoding.
        encrypted_data_yml = base64.b64decode(yaml.safe_load(self.data or ""))
        data_yml = decrypt_data(encrypted_data_yml, key, self.nonce)
        return dict(yaml.safe_load(data_yml))

    def update_encrypted_data_dict(self, passphrase: str, decrypted_dict: Dict[str, Any], update_salt: bool) -> None:
        """Re-encrypt `decrypted_dict` into `data`, rotating the nonce (and salt if requested)."""
        # A fresh nonce on every write is required for ChaCha20Poly1305 security.
        self.nonce = generate_nonce()
        if update_salt:
            self.salt = generate_salt()
        data_yaml = yaml.safe_dump(decrypted_dict)
        key = symmetric_key_from_passphrase(passphrase, self.salt)
        self.data = base64.b64encode(encrypt_data(data_yaml.encode(), key, self.nonce)).decode("utf-8")

    def empty(self) -> bool:
        """Return True if no encrypted payload is stored."""
        return self.data is None or len(self.data) == 0

    def to_dict(self) -> Dict[str, Any]:
        """Return a YAML-serializable dict with salt/nonce hex-encoded."""
        result = asdict(self)
        result["salt"] = result["salt"].hex()
        result["nonce"] = result["nonce"].hex()
        return result
@final
@dataclass
class FileKeyring(FileSystemEventHandler):  # type: ignore[misc]  # Class cannot subclass "" (has type "Any")
    """
    FileKeyring provides a file-based keyring store to manage a FileKeyringContent. The public interface is intended
    to align with the API provided by the keyring module such that the KeyringWrapper class can pick an appropriate
    keyring store backend based on the OS.

    The instance also acts as a watchdog FileSystemEventHandler so that external
    modifications of keyring.yaml are detected and trigger a reload before the next access.
    """

    # Location of keyring.yaml on disk
    keyring_path: Path
    # Cache of the whole plaintext YAML file contents (never encrypted)
    cached_file_content: FileKeyringContent
    # Watchdog observer monitoring keyring_path's parent directory
    keyring_observer: Observer = field(default_factory=Observer)
    load_keyring_lock: threading.RLock = field(default_factory=threading.RLock)  # Guards access to needs_load_keyring
    # Set when the file changed on disk and must be re-read before the next use
    needs_load_keyring: bool = False
    # Cache of the decrypted YAML contained in keyring.data
    cached_data_dict: Dict[str, Any] = field(default_factory=default_file_keyring_data)
    # mtime of keyring.yaml as of the last observed modification
    keyring_last_mod_time: Optional[float] = None
    # Key/value pairs to set on the outer payload on the next write
    file_content_properties_for_next_write: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def create(cls, keys_root_path: Path = DEFAULT_KEYS_ROOT_PATH) -> FileKeyring:
        """
        Creates a fresh keyring.yaml file if necessary. Otherwise, loads and caches file content.
        """
        keyring_path = keyring_path_from_root(keys_root_path)
        try:
            file_content = FileKeyringContent.create_from_path(keyring_path)
        except FileNotFoundError:
            # Write the default file content to disk
            file_content = FileKeyringContent()
            file_content.write_to_path(keyring_path)
        obj = cls(
            keyring_path=keyring_path,
            cached_file_content=file_content,
        )
        obj.setup_keyring_file_watcher()
        return obj

    def __hash__(self) -> int:
        # Identity is defined by the backing file location.
        return hash(self.keyring_path)

    @contextlib.contextmanager
    def lock_and_reload_if_required(self) -> Iterator[None]:
        """
        Acquire the inter-process file lock and reload keyring.yaml if it was modified
        on disk since the last load. All read/write accessors run under this context.
        """
        with Lockfile.create(self.keyring_path, timeout=30, poll_interval=0.2):
            self.check_if_keyring_file_modified()
            with self.load_keyring_lock:
                if self.needs_load_keyring:
                    self.load_keyring()
            yield

    def setup_keyring_file_watcher(self) -> None:
        """Start watching the keyring directory for external modifications (idempotent)."""
        # recursive=True necessary for macOS support
        if not self.keyring_observer.is_alive():
            self.keyring_observer.schedule(self, self.keyring_path.parent, recursive=True)
            self.keyring_observer.start()

    def cleanup_keyring_file_watcher(self) -> None:
        """Stop the file watcher and wait for its thread to exit."""
        if self.keyring_observer.is_alive():
            self.keyring_observer.stop()
            self.keyring_observer.join()

    def on_modified(self, event: Union[FileSystemEvent, DirModifiedEvent]) -> None:
        # Watchdog callback: flag the keyring for reload on the next access.
        self.check_if_keyring_file_modified()

    def check_if_keyring_file_modified(self) -> None:
        """Compare the file's mtime against the last seen one and flag a reload if newer."""
        try:
            last_modified = os.stat(self.keyring_path).st_mtime
            if not self.keyring_last_mod_time or self.keyring_last_mod_time < last_modified:
                self.keyring_last_mod_time = last_modified
                with self.load_keyring_lock:
                    self.needs_load_keyring = True
        except FileNotFoundError:
            # If the file doesn't exist there's nothing to do...
            pass

    def has_content(self) -> bool:
        """
        Quick test to determine if keyring contains anything in keyring.data.
        """
        return not self.cached_file_content.empty()

    def cached_keys(self) -> Dict[str, Dict[str, str]]:
        """
        Returns keyring.data.keys
        """
        keys_dict: Dict[str, Dict[str, str]] = self.cached_data_dict["keys"]
        return keys_dict

    def cached_labels(self) -> Dict[int, str]:
        """
        Returns keyring.data.labels
        """
        labels_dict: Dict[int, str] = self.cached_data_dict["labels"]
        return labels_dict

    def get_password(self, service: str, user: str) -> Optional[str]:
        """
        Returns the passphrase named by the 'user' parameter from the cached
        keyring data (does not force a read from disk)
        """
        with self.lock_and_reload_if_required():
            return self.cached_keys().get(service, {}).get(user)

    def set_password(self, service: str, user: str, passphrase: str) -> None:
        """
        Store the passphrase to the keyring data using the name specified by the
        'user' parameter. Will force a write to keyring.yaml on success.
        """
        with self.lock_and_reload_if_required():
            keys = self.cached_keys()
            # Ensure a dictionary exists for the 'service'
            if keys.get(service) is None:
                keys[service] = {}
            keys[service][user] = passphrase
            self.write_keyring()

    def delete_password(self, service: str, user: str) -> None:
        """
        Deletes the passphrase named by the 'user' parameter from the keyring data
        (will force a write to keyring.yaml on success)
        """
        with self.lock_and_reload_if_required():
            keys = self.cached_keys()
            service_dict = keys.get(service, {})
            # NOTE(review): this truthiness test skips the write (and the empty-service
            # cleanup) when the stored passphrase is an empty string — confirm intended.
            if service_dict.pop(user, None):
                if len(service_dict) == 0:
                    keys.pop(service)
                self.write_keyring()

    def get_label(self, fingerprint: int) -> Optional[str]:
        """
        Returns the label for the given fingerprint or None if there is no label assigned.
        """
        with self.lock_and_reload_if_required():
            return self.cached_labels().get(fingerprint)

    def set_label(self, fingerprint: int, label: str) -> None:
        """
        Set a label for the given fingerprint. This will force a write to keyring.yaml on success.

        Raises KeychainLabelInvalid for empty/whitespace-padded/multiline/overlong labels and
        KeychainLabelExists when the label is already used by another fingerprint.
        """
        # First validate the label
        stripped_label = label.strip()
        if len(stripped_label) == 0:
            raise KeychainLabelInvalid(label, "label can't be empty or whitespace only")
        if len(stripped_label) != len(label):
            raise KeychainLabelInvalid(label, "label can't contain leading or trailing whitespaces")
        if len(label) != len(label.replace("\n", "").replace("\t", "")):
            raise KeychainLabelInvalid(label, "label can't contain newline or tab")
        if len(label) > MAX_LABEL_LENGTH:
            raise KeychainLabelInvalid(label, f"label exceeds max length: {len(label)}/{MAX_LABEL_LENGTH}")
        # Then try to set it
        with self.lock_and_reload_if_required():
            labels = self.cached_labels()
            for existing_fingerprint, existing_label in labels.items():
                if label == existing_label:
                    raise KeychainLabelExists(label, existing_fingerprint)
            labels[fingerprint] = label
            self.write_keyring()

    def delete_label(self, fingerprint: int) -> None:
        """
        Removes the label for the fingerprint. This will force a write to keyring.yaml on success.
        """
        with self.lock_and_reload_if_required():
            try:
                self.cached_labels().pop(fingerprint)
            except KeyError as e:
                raise KeychainFingerprintNotFound(fingerprint) from e
            self.write_keyring()

    def check_passphrase(self, passphrase: str, force_reload: bool = False) -> bool:
        """
        Attempts to validate the passphrase by decrypting keyring.data
        contents and checking the checkbytes value
        """
        if force_reload:
            self.cached_file_content = FileKeyringContent.create_from_path(self.keyring_path)
        try:
            self.cached_file_content.get_decrypted_data_dict(passphrase)
            return True
        except Exception:
            return False

    def load_keyring(self, passphrase: Optional[str] = None) -> None:
        """
        Re-read keyring.yaml from disk and refresh the decrypted cache, prompting for
        the master passphrase when none is supplied and the file has content.
        """
        from flax.cmds.passphrase_funcs import obtain_current_passphrase

        with self.load_keyring_lock:
            self.needs_load_keyring = False
        self.cached_file_content = FileKeyringContent.create_from_path(self.keyring_path)
        if not self.has_content():
            return
        if passphrase is None:
            # TODO, this prompts for the passphrase interactively, move this out
            passphrase = obtain_current_passphrase(use_passphrase_cache=True)
        self.cached_data_dict.update(self.cached_file_content.get_decrypted_data_dict(passphrase))

    def write_keyring(self, fresh_salt: bool = False) -> None:
        """
        Encrypt cached_data_dict with the current master passphrase and persist it to
        keyring.yaml, optionally rotating the salt. On failure the cached content is
        restored from disk.
        """
        from flax.cmds.passphrase_funcs import obtain_current_passphrase
        from flax.util.keyring_wrapper import KeyringWrapper

        # Merge in other properties like "passphrase_hint"
        if "passphrase_hint" in self.file_content_properties_for_next_write:
            self.cached_file_content.passphrase_hint = self.file_content_properties_for_next_write["passphrase_hint"]
        # When writing for the first time, we should have a cached passphrase which hasn't been
        # validated (because it can't be validated yet...)
        # TODO Fix hinting in `KeyringWrapper` to get rid of the ignores below
        if not self.has_content() and KeyringWrapper.get_shared_instance().has_cached_master_passphrase():  # type: ignore[no-untyped-call] # noqa: E501
            passphrase = KeyringWrapper.get_shared_instance().get_cached_master_passphrase()[0]  # type: ignore[no-untyped-call] # noqa: E501
        else:
            # TODO, this prompts for the passphrase interactively, move this out
            passphrase = obtain_current_passphrase(use_passphrase_cache=True)
        try:
            self.cached_file_content.update_encrypted_data_dict(passphrase, self.cached_data_dict, fresh_salt)
            self.cached_file_content.write_to_path(self.keyring_path)
            # Cleanup the cached properties now that we wrote the new content to file
            self.file_content_properties_for_next_write = {}
        except Exception:
            # Restore the correct content if we failed to write the updated cache, let it re-raise if loading also fails
            self.cached_file_content = FileKeyringContent.create_from_path(self.keyring_path)

    def get_passphrase_hint(self) -> Optional[str]:
        """
        Return the passphrase hint (if set). The hint data may not yet be written to the keyring, so we
        return the hint data either from the staging dict (file_content_properties_for_next_write), or
        from cached_file_content (loaded from the keyring)
        """
        passphrase_hint: Optional[str] = self.file_content_properties_for_next_write.get("passphrase_hint", None)
        if passphrase_hint is None:
            passphrase_hint = self.cached_file_content.passphrase_hint
        return passphrase_hint

    def set_passphrase_hint(self, passphrase_hint: Optional[str]) -> None:
        """
        Store the new passphrase hint in the staging dict (file_content_properties_for_next_write) to
        be written-out on the next write to the keyring.
        """
        self.file_content_properties_for_next_write["passphrase_hint"] = passphrase_hint
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/plot_sync/receiver.py | flax/plot_sync/receiver.py | from __future__ import annotations
import logging
import time
from dataclasses import dataclass, field
from typing import Any, Awaitable, Callable, Collection, Dict, List, Optional
from typing_extensions import Protocol
from flax.plot_sync.delta import Delta, PathListDelta, PlotListDelta
from flax.plot_sync.exceptions import (
InvalidIdentifierError,
InvalidLastSyncIdError,
PlotAlreadyAvailableError,
PlotNotAvailableError,
PlotSyncException,
SyncIdsMatchError,
)
from flax.plot_sync.util import ErrorCodes, State, T_PlotSyncMessage
from flax.protocols.harvester_protocol import (
Plot,
PlotSyncDone,
PlotSyncError,
PlotSyncIdentifier,
PlotSyncPathList,
PlotSyncPlotList,
PlotSyncResponse,
PlotSyncStart,
)
from flax.server.ws_connection import ProtocolMessageTypes, WSFlaxConnection, make_msg
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import int16, uint32, uint64
from flax.util.misc import get_list_or_len
log = logging.getLogger(__name__)
@dataclass
class Sync:
    """Tracks the progress and accumulated delta of one plot sync cycle."""

    state: State = State.idle
    sync_id: uint64 = uint64(0)
    next_message_id: uint64 = uint64(0)
    plots_processed: uint32 = uint32(0)
    plots_total: uint32 = uint32(0)
    delta: Delta = field(default_factory=Delta)
    time_done: Optional[float] = None

    def in_progress(self) -> bool:
        # A sync id of 0 means no sync cycle has been started.
        return self.sync_id != 0

    def bump_next_message_id(self) -> None:
        self.next_message_id = uint64(self.next_message_id + 1)

    def bump_plots_processed(self) -> None:
        self.plots_processed = uint32(self.plots_processed + 1)

    def __str__(self) -> str:
        details = ", ".join(
            (
                f"state {self.state}",
                f"sync_id {self.sync_id}",
                f"next_message_id {self.next_message_id}",
                f"plots_processed {self.plots_processed}",
                f"plots_total {self.plots_total}",
                f"delta {self.delta}",
                f"time_done {self.time_done}",
            )
        )
        return f"[{details}]"
class ReceiverUpdateCallback(Protocol):
    """Callback signature used by Receiver to report plot updates for a peer."""

    def __call__(self, peer_id: bytes32, delta: Optional[Delta]) -> Awaitable[None]:
        ...
class Receiver:
    """
    Farmer-side receiver of the harvester plot sync protocol. It applies incoming
    sync messages to a cached view of the harvester's plots and notifies the
    registered callback on progress and when a sync cycle completes.
    """

    # Connection to the harvester this receiver mirrors
    _connection: WSFlaxConnection
    # Sync cycle currently being received
    _current_sync: Sync
    # Last completed sync cycle
    _last_sync: Sync
    # filename -> Plot info, as of the last completed sync
    _plots: Dict[str, Plot]
    # Paths the harvester failed to open
    _invalid: List[str]
    # Paths whose plot keys are missing
    _keys_missing: List[str]
    # Duplicate plot paths
    _duplicates: List[str]
    # Sum of file_size over all valid plots
    _total_plot_size: int
    # Invoked with the resulting delta after each completed sync (and with None on progress)
    _update_callback: ReceiverUpdateCallback

    def __init__(
        self,
        connection: WSFlaxConnection,
        update_callback: ReceiverUpdateCallback,
    ) -> None:
        self._connection = connection
        self._current_sync = Sync()
        self._last_sync = Sync()
        self._plots = {}
        self._invalid = []
        self._keys_missing = []
        self._duplicates = []
        self._total_plot_size = 0
        self._update_callback = update_callback

    async def trigger_callback(self, update: Optional[Delta] = None) -> None:
        """Invoke the registered update callback; exceptions are logged, not raised."""
        try:
            await self._update_callback(self._connection.peer_node_id, update)
        except Exception as e:
            log.error(f"_update_callback: node_id {self.connection().peer_node_id}, raised {e}")

    def reset(self) -> None:
        """Drop all cached plot state and restart sync tracking from scratch."""
        log.info(f"reset: node_id {self.connection().peer_node_id}, current_sync: {self._current_sync}")
        self._current_sync = Sync()
        self._last_sync = Sync()
        self._plots.clear()
        self._invalid.clear()
        self._keys_missing.clear()
        self._duplicates.clear()
        self._total_plot_size = 0

    def connection(self) -> WSFlaxConnection:
        return self._connection

    def current_sync(self) -> Sync:
        return self._current_sync

    def last_sync(self) -> Sync:
        return self._last_sync

    def initial_sync(self) -> bool:
        # True until the first sync cycle has completed.
        return self._last_sync.sync_id == 0

    def plots(self) -> Dict[str, Plot]:
        return self._plots

    def invalid(self) -> List[str]:
        return self._invalid

    def keys_missing(self) -> List[str]:
        return self._keys_missing

    def duplicates(self) -> List[str]:
        return self._duplicates

    def total_plot_size(self) -> int:
        return self._total_plot_size

    async def _process(
        self, method: Callable[[T_PlotSyncMessage], Any], message_type: ProtocolMessageTypes, message: T_PlotSyncMessage
    ) -> None:
        """
        Run `method` on `message` and send a PlotSyncResponse back to the peer,
        translating raised exceptions into PlotSyncError payloads.
        """
        log.debug(
            f"_process: node_id {self.connection().peer_node_id}, message_type: {message_type}, message: {message}"
        )

        async def send_response(plot_sync_error: Optional[PlotSyncError] = None) -> None:
            if self._connection is not None:
                await self._connection.send_message(
                    make_msg(
                        ProtocolMessageTypes.plot_sync_response,
                        PlotSyncResponse(message.identifier, int16(message_type.value), plot_sync_error),
                    )
                )

        try:
            await method(message)
            await send_response()
        except InvalidIdentifierError as e:
            log.warning(f"_process: node_id {self.connection().peer_node_id}, InvalidIdentifierError {e}")
            await send_response(PlotSyncError(int16(e.error_code), f"{e}", e.expected_identifier))
        except PlotSyncException as e:
            log.warning(f"_process: node_id {self.connection().peer_node_id}, Error {e}")
            await send_response(PlotSyncError(int16(e.error_code), f"{e}", None))
        except Exception as e:
            log.warning(f"_process: node_id {self.connection().peer_node_id}, Exception {e}")
            await send_response(PlotSyncError(int16(ErrorCodes.unknown), f"{e}", None))

    def _validate_identifier(self, identifier: PlotSyncIdentifier, start: bool = False) -> None:
        """
        Raise InvalidIdentifierError unless the message's sync id and message id match
        the expected ones. For a start message only the message id is checked, since
        the sync id is assigned by the start message itself.
        """
        sync_id_match = identifier.sync_id == self._current_sync.sync_id
        message_id_match = identifier.message_id == self._current_sync.next_message_id
        identifier_match = sync_id_match and message_id_match
        if (start and not message_id_match) or (not start and not identifier_match):
            expected: PlotSyncIdentifier = PlotSyncIdentifier(
                identifier.timestamp, self._current_sync.sync_id, self._current_sync.next_message_id
            )
            raise InvalidIdentifierError(
                identifier,
                expected,
            )

    async def _sync_started(self, data: PlotSyncStart) -> None:
        """Begin a new sync cycle after validating last-sync-id continuity with the peer."""
        if data.initial:
            self.reset()
        self._validate_identifier(data.identifier, True)
        if data.last_sync_id != self._last_sync.sync_id:
            raise InvalidLastSyncIdError(data.last_sync_id, self._last_sync.sync_id)
        if data.last_sync_id == data.identifier.sync_id:
            raise SyncIdsMatchError(State.idle, data.last_sync_id)
        self._current_sync.sync_id = data.identifier.sync_id
        self._current_sync.delta.clear()
        self._current_sync.state = State.loaded
        self._current_sync.plots_total = data.plot_file_count
        self._current_sync.bump_next_message_id()

    async def sync_started(self, data: PlotSyncStart) -> None:
        await self._process(self._sync_started, ProtocolMessageTypes.plot_sync_start, data)

    async def _process_loaded(self, plot_infos: PlotSyncPlotList) -> None:
        """Accumulate newly loaded plots into the delta; advance to `removed` when final."""
        self._validate_identifier(plot_infos.identifier)
        for plot_info in plot_infos.data:
            if plot_info.filename in self._plots or plot_info.filename in self._current_sync.delta.valid.additions:
                raise PlotAlreadyAvailableError(State.loaded, plot_info.filename)
            self._current_sync.delta.valid.additions[plot_info.filename] = plot_info
            self._current_sync.bump_plots_processed()
        # Let the callback receiver know about the sync progress updates
        await self.trigger_callback()
        if plot_infos.final:
            self._current_sync.state = State.removed
            self._current_sync.bump_next_message_id()

    async def process_loaded(self, plot_infos: PlotSyncPlotList) -> None:
        await self._process(self._process_loaded, ProtocolMessageTypes.plot_sync_loaded, plot_infos)

    async def process_path_list(
        self,
        *,
        state: State,
        next_state: State,
        target: Collection[str],
        delta: List[str],
        paths: PlotSyncPathList,
        is_removal: bool = False,
    ) -> None:
        """
        Shared handler for the removed/invalid/keys_missing/duplicates path-list messages:
        validates each path against `target`/`delta`, appends it to `delta`, and advances
        the state machine to `next_state` when the message is marked final.
        """
        self._validate_identifier(paths.identifier)
        for path in paths.data:
            if is_removal and (path not in target or path in delta):
                raise PlotNotAvailableError(state, path)
            if not is_removal and path in delta:
                raise PlotAlreadyAvailableError(state, path)
            delta.append(path)
            if not is_removal:
                self._current_sync.bump_plots_processed()
        # Let the callback receiver know about the sync progress updates
        await self.trigger_callback()
        if paths.final:
            self._current_sync.state = next_state
            self._current_sync.bump_next_message_id()

    async def _process_removed(self, paths: PlotSyncPathList) -> None:
        await self.process_path_list(
            state=State.removed,
            next_state=State.invalid,
            target=self._plots,
            delta=self._current_sync.delta.valid.removals,
            paths=paths,
            is_removal=True,
        )

    async def process_removed(self, paths: PlotSyncPathList) -> None:
        await self._process(self._process_removed, ProtocolMessageTypes.plot_sync_removed, paths)

    async def _process_invalid(self, paths: PlotSyncPathList) -> None:
        await self.process_path_list(
            state=State.invalid,
            next_state=State.keys_missing,
            target=self._invalid,
            delta=self._current_sync.delta.invalid.additions,
            paths=paths,
        )

    async def process_invalid(self, paths: PlotSyncPathList) -> None:
        await self._process(self._process_invalid, ProtocolMessageTypes.plot_sync_invalid, paths)

    async def _process_keys_missing(self, paths: PlotSyncPathList) -> None:
        await self.process_path_list(
            state=State.keys_missing,
            next_state=State.duplicates,
            target=self._keys_missing,
            delta=self._current_sync.delta.keys_missing.additions,
            paths=paths,
        )

    async def process_keys_missing(self, paths: PlotSyncPathList) -> None:
        await self._process(self._process_keys_missing, ProtocolMessageTypes.plot_sync_keys_missing, paths)

    async def _process_duplicates(self, paths: PlotSyncPathList) -> None:
        await self.process_path_list(
            state=State.duplicates,
            next_state=State.done,
            target=self._duplicates,
            delta=self._current_sync.delta.duplicates.additions,
            paths=paths,
        )

    async def process_duplicates(self, paths: PlotSyncPathList) -> None:
        await self._process(self._process_duplicates, ProtocolMessageTypes.plot_sync_duplicates, paths)

    async def _sync_done(self, data: PlotSyncDone) -> None:
        """Finalize the cycle: compute the resulting delta, apply it, rotate syncs, notify."""
        self._validate_identifier(data.identifier)
        self._current_sync.time_done = time.time()
        # First create the update delta (i.e. transform invalid/keys_missing into additions/removals) which we will
        # send to the callback receiver below
        delta_invalid: PathListDelta = PathListDelta.from_lists(
            self._invalid, self._current_sync.delta.invalid.additions
        )
        delta_keys_missing: PathListDelta = PathListDelta.from_lists(
            self._keys_missing, self._current_sync.delta.keys_missing.additions
        )
        delta_duplicates: PathListDelta = PathListDelta.from_lists(
            self._duplicates, self._current_sync.delta.duplicates.additions
        )
        update = Delta(
            PlotListDelta(
                self._current_sync.delta.valid.additions.copy(), self._current_sync.delta.valid.removals.copy()
            ),
            delta_invalid,
            delta_keys_missing,
            delta_duplicates,
        )
        # Apply delta
        self._plots.update(self._current_sync.delta.valid.additions)
        for removal in self._current_sync.delta.valid.removals:
            del self._plots[removal]
        self._invalid = self._current_sync.delta.invalid.additions.copy()
        self._keys_missing = self._current_sync.delta.keys_missing.additions.copy()
        self._duplicates = self._current_sync.delta.duplicates.additions.copy()
        self._total_plot_size = sum(plot.file_size for plot in self._plots.values())
        # Save current sync as last sync and create a new current sync
        self._last_sync = self._current_sync
        self._current_sync = Sync()
        # Let the callback receiver know if this sync cycle caused any update
        await self.trigger_callback(update)

    async def sync_done(self, data: PlotSyncDone) -> None:
        await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data)

    def to_dict(self, counts_only: bool = False) -> Dict[str, Any]:
        """Return a JSON-serializable summary of this receiver's state for RPC consumers."""
        syncing = None
        if self._current_sync.in_progress():
            syncing = {
                "initial": self.initial_sync(),
                "plot_files_processed": self._current_sync.plots_processed,
                "plot_files_total": self._current_sync.plots_total,
            }
        return {
            "connection": {
                "node_id": self._connection.peer_node_id,
                "host": self._connection.peer_host,
                "port": self._connection.peer_port,
            },
            "plots": get_list_or_len(list(self._plots.values()), counts_only),
            "failed_to_open_filenames": get_list_or_len(self._invalid, counts_only),
            "no_key_filenames": get_list_or_len(self._keys_missing, counts_only),
            "duplicates": get_list_or_len(self._duplicates, counts_only),
            "total_plot_size": self._total_plot_size,
            "syncing": syncing,
            "last_sync_time": self._last_sync.time_done,
        }
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/plot_sync/util.py | flax/plot_sync/util.py | from __future__ import annotations
from enum import IntEnum
from typing import TypeVar
from typing_extensions import Protocol
from flax.protocols.harvester_protocol import PlotSyncIdentifier
class Constants:
    # Seconds to wait for a plot sync response before treating the message as timed out.
    message_timeout: int = 10
class State(IntEnum):
    """
    Phases of one plot sync cycle. The receiver advances through them in order:
    idle -> loaded -> removed -> invalid -> keys_missing -> duplicates -> done.
    """

    idle = 0
    loaded = 1
    removed = 2
    invalid = 3
    keys_missing = 4
    duplicates = 5
    done = 6
class ErrorCodes(IntEnum):
    """Error codes carried in PlotSyncError responses (see flax.plot_sync.exceptions)."""

    unknown = -1
    invalid_state = 0
    invalid_peer_id = 1
    invalid_identifier = 2
    invalid_last_sync_id = 3
    invalid_connection_type = 4
    plot_already_available = 5
    plot_not_available = 6
    sync_ids_match = 7
class PlotSyncMessage(Protocol):
    """Structural type for any plot sync protocol message that carries an identifier."""

    @property
    def identifier(self) -> PlotSyncIdentifier:
        pass


# Bound TypeVar so handlers can be generic over the concrete message type.
T_PlotSyncMessage = TypeVar("T_PlotSyncMessage", bound=PlotSyncMessage)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/plot_sync/exceptions.py | flax/plot_sync/exceptions.py | from __future__ import annotations
from typing import Any
from flax.plot_sync.util import ErrorCodes, State
from flax.protocols.harvester_protocol import PlotSyncIdentifier
from flax.server.ws_connection import NodeType
from flax.util.ints import uint64
class PlotSyncException(Exception):
    """Base class for plot sync protocol errors; carries the protocol error code."""

    def __init__(self, message: str, error_code: ErrorCodes) -> None:
        super().__init__(message)
        # Stored so handlers can map the exception into a PlotSyncError response.
        self.error_code = error_code


class AlreadyStartedError(Exception):
    """Raised when a plot sync component is started while it is already running."""

    def __init__(self) -> None:
        super().__init__("Already started!")
class InvalidValueError(PlotSyncException):
    """Raised when a received value differs from the expected one."""

    def __init__(self, message: str, actual: Any, expected: Any, error_code: ErrorCodes) -> None:
        detail = f"{message}: Actual {actual}, Expected {expected}"
        super().__init__(detail, error_code)


class InvalidIdentifierError(InvalidValueError):
    """Raised when a message identifier does not match the expected sync/message ids."""

    def __init__(self, actual_identifier: PlotSyncIdentifier, expected_identifier: PlotSyncIdentifier) -> None:
        super().__init__("Invalid identifier", actual_identifier, expected_identifier, ErrorCodes.invalid_identifier)
        # Kept so the error response can tell the peer what was expected.
        self.actual_identifier: PlotSyncIdentifier = actual_identifier
        self.expected_identifier: PlotSyncIdentifier = expected_identifier


class InvalidLastSyncIdError(InvalidValueError):
    """Raised when the peer's reported last-sync-id does not match ours."""

    def __init__(self, actual: uint64, expected: uint64) -> None:
        super().__init__("Invalid last-sync-id", actual, expected, ErrorCodes.invalid_last_sync_id)


class InvalidConnectionTypeError(InvalidValueError):
    """Raised when a plot sync peer has an unexpected node type."""

    def __init__(self, actual: NodeType, expected: NodeType) -> None:
        super().__init__("Unexpected connection type", actual, expected, ErrorCodes.invalid_connection_type)


class PlotAlreadyAvailableError(PlotSyncException):
    """Raised when a plot addition arrives for a path that is already known."""

    def __init__(self, state: State, path: str) -> None:
        super().__init__(f"{state.name}: Plot already available - {path}", ErrorCodes.plot_already_available)


class PlotNotAvailableError(PlotSyncException):
    """Raised when a plot removal refers to a path that is not known."""

    def __init__(self, state: State, path: str) -> None:
        super().__init__(f"{state.name}: Plot not available - {path}", ErrorCodes.plot_not_available)


class SyncIdsMatchError(PlotSyncException):
    """Raised when a new sync cycle reuses the previous cycle's sync id."""

    def __init__(self, state: State, sync_id: uint64) -> None:
        super().__init__(f"{state.name}: Sync ids are equal - {sync_id}", ErrorCodes.sync_ids_match)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/plot_sync/sender.py | flax/plot_sync/sender.py | from __future__ import annotations
import asyncio
import logging
import time
import traceback
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Generic, Iterable, List, Optional, Tuple, Type, TypeVar
from typing_extensions import Protocol
from flax.plot_sync.exceptions import AlreadyStartedError, InvalidConnectionTypeError
from flax.plot_sync.util import Constants
from flax.plotting.manager import PlotManager
from flax.plotting.util import PlotInfo
from flax.protocols.harvester_protocol import (
Plot,
PlotSyncDone,
PlotSyncIdentifier,
PlotSyncPathList,
PlotSyncPlotList,
PlotSyncResponse,
PlotSyncStart,
)
from flax.server.ws_connection import NodeType, ProtocolMessageTypes, WSFlaxConnection, make_msg
from flax.util.generator_tools import list_to_batches
from flax.util.ints import int16, uint32, uint64
log = logging.getLogger(__name__)
def _convert_plot_info_list(plot_infos: List[PlotInfo]) -> List[Plot]:
converted: List[Plot] = []
for plot_info in plot_infos:
converted.append(
Plot(
filename=plot_info.prover.get_filename(),
size=plot_info.prover.get_size(),
plot_id=plot_info.prover.get_id(),
pool_public_key=plot_info.pool_public_key,
pool_contract_puzzle_hash=plot_info.pool_contract_puzzle_hash,
plot_public_key=plot_info.plot_public_key,
file_size=uint64(plot_info.file_size),
time_modified=uint64(int(plot_info.time_modified)),
)
)
return converted
class PayloadType(Protocol):
    """Structural type for sync payloads: constructible from an identifier, serializable to bytes."""

    def __init__(self, identifier: PlotSyncIdentifier, *args: object) -> None:
        ...

    def __bytes__(self) -> bytes:
        ...


T = TypeVar("T", bound=PayloadType)


@dataclass
class MessageGenerator(Generic[T]):
    """Deferred factory for one sync message; builds a fresh identifier at send time."""

    sync_id: uint64
    message_type: ProtocolMessageTypes
    message_id: uint64
    payload_type: Type[T]
    args: Iterable[object]

    def generate(self) -> Tuple[PlotSyncIdentifier, T]:
        # Timestamp at generation time so a retried message carries a current timestamp.
        new_identifier = PlotSyncIdentifier(uint64(int(time.time())), self.sync_id, self.message_id)
        return new_identifier, self.payload_type(new_identifier, *self.args)
@dataclass
class ExpectedResponse:
    """Tracks the PlotSyncResponse we are waiting for after sending a sync message."""

    message_type: ProtocolMessageTypes
    identifier: PlotSyncIdentifier
    # Filled in by Sender.set_response when a matching response arrives.
    message: Optional[PlotSyncResponse] = None

    def __str__(self) -> str:
        head = f"expected_message_type: {self.message_type.name}"
        tail = f"expected_identifier: {self.identifier}, message {self.message}"
        return f"{head}, {tail}"
class Sender:
_plot_manager: PlotManager
_connection: Optional[WSFlaxConnection]
_sync_id: uint64
_next_message_id: uint64
_messages: List[MessageGenerator[PayloadType]]
_last_sync_id: uint64
_stop_requested = False
_task: Optional[asyncio.Task[None]]
_response: Optional[ExpectedResponse]
def __init__(self, plot_manager: PlotManager) -> None:
self._plot_manager = plot_manager
self._connection = None
self._sync_id = uint64(0)
self._next_message_id = uint64(0)
self._messages = []
self._last_sync_id = uint64(0)
self._stop_requested = False
self._task = None
self._response = None
def __str__(self) -> str:
return f"sync_id {self._sync_id}, next_message_id {self._next_message_id}, messages {len(self._messages)}"
async def start(self) -> None:
if self._task is not None and self._stop_requested:
await self.await_closed()
if self._task is None:
self._task = asyncio.create_task(self._run())
# TODO, Add typing in PlotManager
if not self._plot_manager.initial_refresh() or self._sync_id != 0: # type:ignore[no-untyped-call]
self._reset()
else:
raise AlreadyStartedError()
def stop(self) -> None:
self._stop_requested = True
async def await_closed(self) -> None:
if self._task is not None:
await self._task
self._task = None
self._reset()
self._stop_requested = False
def set_connection(self, connection: WSFlaxConnection) -> None:
assert connection.connection_type is not None
if connection.connection_type != NodeType.FARMER:
raise InvalidConnectionTypeError(connection.connection_type, NodeType.HARVESTER)
self._connection = connection
def bump_next_message_id(self) -> None:
self._next_message_id = uint64(self._next_message_id + 1)
def _reset(self) -> None:
log.debug(f"_reset {self}")
self._last_sync_id = uint64(0)
self._sync_id = uint64(0)
self._next_message_id = uint64(0)
self._messages.clear()
if self._task is not None:
self.sync_start(self._plot_manager.plot_count(), True)
for remaining, batch in list_to_batches(
list(self._plot_manager.plots.values()), self._plot_manager.refresh_parameter.batch_size
):
self.process_batch(batch, remaining)
self.sync_done([], 0)
async def _wait_for_response(self) -> bool:
    """Poll until the expected response arrives or the timeout elapses.

    Returns True when a response was received in time.
    """
    assert self._response is not None
    deadline = time.time() + Constants.message_timeout
    while self._response.message is None and time.time() < deadline:
        await asyncio.sleep(0.1)
    return self._response.message is not None
def set_response(self, response: PlotSyncResponse) -> bool:
    """Validate *response* against the currently awaited message and store it.

    Returns True when the response matches the expected identifier and
    message type; otherwise logs a warning and returns False.
    """
    expected = self._response
    if expected is None or expected.message is not None:
        log.warning(f"set_response skip unexpected response: {response}")
        return False
    age = time.time() - float(response.identifier.timestamp)
    if age > Constants.message_timeout:
        log.warning(f"set_response skip expired response: {response}")
        return False
    if response.identifier.sync_id != expected.identifier.sync_id:
        log.warning(
            "set_response unexpected sync-id: " f"{response.identifier.sync_id}/{expected.identifier.sync_id}"
        )
        return False
    if response.identifier.message_id != expected.identifier.message_id:
        log.warning(
            "set_response unexpected message-id: "
            f"{response.identifier.message_id}/{expected.identifier.message_id}"
        )
        return False
    if response.message_type != int16(expected.message_type.value):
        log.warning(
            "set_response unexpected message-type: " f"{response.message_type}/{expected.message_type.value}"
        )
        return False
    log.debug(f"set_response valid {response}")
    expected.message = response
    return True
def _add_message(self, message_type: ProtocolMessageTypes, payload_type: Any, *args: Any) -> None:
    """Queue a new message generator for the active sync."""
    assert self._sync_id != 0
    next_id = uint64(len(self._messages))
    generator = MessageGenerator(self._sync_id, message_type, next_id, payload_type, args)
    self._messages.append(generator)
async def _send_next_message(self) -> bool:
    """Send the message at `_next_message_id` and wait for its response.

    Returns True on success (or successful recovery from a recoverable
    error), False when the response timed out or a non-recoverable
    failure forced a reset.
    """

    def failed(message: str) -> bool:
        # By forcing a reset we try to get back into a normal state if some not recoverable failure came up.
        log.warning(message)
        self._reset()
        return False

    assert len(self._messages) >= self._next_message_id
    message_generator = self._messages[self._next_message_id]
    identifier, payload = message_generator.generate()
    # The generator must belong to the active sync and to the expected message slot.
    if self._sync_id == 0 or identifier.sync_id != self._sync_id or identifier.message_id != self._next_message_id:
        return failed(f"Invalid message generator {message_generator} for {self}")

    self._response = ExpectedResponse(message_generator.message_type, identifier)
    log.debug(f"_send_next_message send {message_generator.message_type.name}: {payload}")
    if self._connection is None or not await self._connection.send_message(
        make_msg(message_generator.message_type, payload)
    ):
        return failed(f"Send failed {self._connection}")
    if not await self._wait_for_response():
        # Timeout: keep the state untouched so the caller can retry the same message.
        log.info(f"_send_next_message didn't receive response {self._response}")
        return False

    assert self._response.message is not None

    if self._response.message.error is not None:
        recovered = False
        expected = self._response.message.error.expected_identifier
        # If we have a recoverable error there is a `expected_identifier` included
        if expected is not None:
            # If the receiver has a zero sync/message id and we already sent all messages from the current event
            # we most likely missed the response to the done message. We can finalize the sync and move on here.
            all_sent = (
                self._messages[-1].message_type == ProtocolMessageTypes.plot_sync_done
                and self._next_message_id == len(self._messages) - 1
            )
            if expected.sync_id == expected.message_id == 0 and all_sent:
                self._finalize_sync()
                recovered = True
            elif self._sync_id == expected.sync_id and expected.message_id < len(self._messages):
                # Rewind to the message id the receiver expects next.
                self._next_message_id = expected.message_id
                recovered = True
        if not recovered:
            return failed(f"Not recoverable error {self._response.message}")
        return True

    if self._response.message_type == ProtocolMessageTypes.plot_sync_done:
        self._finalize_sync()
    else:
        self.bump_next_message_id()
    return True
def _add_list_batched(self, message_type: ProtocolMessageTypes, payload_type: Any, data: List[Any]) -> None:
    """Queue *data* split into batches; an empty list becomes one final message."""
    if not data:
        self._add_message(message_type, payload_type, [], True)
        return
    batch_size = self._plot_manager.refresh_parameter.batch_size
    for remaining, batch in list_to_batches(data, batch_size):
        self._add_message(message_type, payload_type, batch, remaining == 0)
def sync_start(self, count: float, initial: bool) -> None:
    """Begin a new sync covering *count* plots; blocks while one is still active."""
    log.debug(f"sync_start {self}: count {count}, initial {initial}")
    while self.sync_active():
        if self._stop_requested:
            log.debug("sync_start aborted")
            return
        time.sleep(0.1)
    sync_id = int(time.time())
    # Make sure we have unique sync-id's even if we restart refreshing within a second (i.e. in tests)
    if sync_id == self._last_sync_id:
        sync_id += 1
    log.debug(f"sync_start {sync_id}")
    self._sync_id = uint64(sync_id)
    self._add_message(
        ProtocolMessageTypes.plot_sync_start, PlotSyncStart, initial, self._last_sync_id, uint32(int(count))
    )
def process_batch(self, loaded: List[PlotInfo], remaining: int) -> None:
    """Queue a batch of freshly loaded plots; the final batch may be empty."""
    log.debug(f"process_batch {self}: loaded {len(loaded)}, remaining {remaining}")
    if not loaded and remaining != 0:
        return
    converted = _convert_plot_info_list(loaded)
    self._add_message(ProtocolMessageTypes.plot_sync_loaded, PlotSyncPlotList, converted, remaining == 0)
def sync_done(self, removed: List[Path], duration: float) -> None:
    """Queue the final batched lists (removed, invalid, keys-missing, duplicates) and the done marker."""
    log.debug(f"sync_done {self}: removed {len(removed)}, duration {duration}")
    self._add_list_batched(
        ProtocolMessageTypes.plot_sync_removed,
        PlotSyncPathList,
        [str(path) for path in removed],
    )
    self._add_list_batched(
        ProtocolMessageTypes.plot_sync_invalid,
        PlotSyncPathList,
        [str(path) for path in self._plot_manager.failed_to_open_filenames],
    )
    self._add_list_batched(
        ProtocolMessageTypes.plot_sync_keys_missing,
        PlotSyncPathList,
        [str(path) for path in self._plot_manager.no_key_filenames],
    )
    self._add_list_batched(
        ProtocolMessageTypes.plot_sync_duplicates,
        PlotSyncPathList,
        self._plot_manager.get_duplicates().copy(),
    )
    self._add_message(ProtocolMessageTypes.plot_sync_done, PlotSyncDone, uint64(int(duration)))
def _finalize_sync(self) -> None:
    """Mark the running sync as completed and clear the message queue."""
    log.debug(f"_finalize_sync {self}")
    assert self._sync_id != 0
    self._last_sync_id = self._sync_id
    self._messages.clear()
    self._next_message_id = uint64(0)
    # Do this at the end since `_sync_id` is used as sync active indicator.
    self._sync_id = uint64(0)
def sync_active(self) -> bool:
    """True while a sync is running (non-zero `_sync_id`)."""
    return bool(self._sync_id != 0)
def connected(self) -> bool:
    """Report whether a farmer connection is currently attached."""
    has_connection = self._connection is not None
    return has_connection
async def _run(self) -> None:
    """
    This is the sender task responsible to send new messages during sync as they come into Sender._messages
    triggered by the plot manager callback.
    """
    while not self._stop_requested:
        try:
            # Idle until we are connected to a farmer and a sync has started.
            while not self.connected() or not self.sync_active():
                if self._stop_requested:
                    return
                await asyncio.sleep(0.1)
            # Drain the queue; new messages may still be appended while this runs.
            while not self._stop_requested and self.sync_active():
                if self._next_message_id >= len(self._messages):
                    # Nothing queued yet, wait for the plot manager callback.
                    await asyncio.sleep(0.1)
                    continue
                if not await self._send_next_message():
                    # Back off after a failed or timed-out send before retrying.
                    await asyncio.sleep(Constants.message_timeout)
        except Exception as e:
            log.error(f"Exception: {e} {traceback.format_exc()}")
            self._reset()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/plot_sync/delta.py | flax/plot_sync/delta.py | from __future__ import annotations
from dataclasses import dataclass, field
from typing import Dict, List, Union
from flax.protocols.harvester_protocol import Plot
@dataclass
class DeltaType:
    """Base for plot-sync deltas: tracked additions and removals of entries."""

    additions: Union[Dict[str, Plot], List[str]]
    removals: List[str]

    def __str__(self) -> str:
        return f"+{len(self.additions)}/-{len(self.removals)}"

    def clear(self) -> None:
        """Drop all recorded additions and removals."""
        self.additions.clear()
        self.removals.clear()

    def empty(self) -> bool:
        """True when neither additions nor removals are recorded."""
        return not self.additions and not self.removals
@dataclass
class PlotListDelta(DeltaType):
    # Newly loaded plots keyed by path string; removals are plain path strings.
    additions: Dict[str, Plot] = field(default_factory=dict)
    removals: List[str] = field(default_factory=list)
@dataclass
class PathListDelta(DeltaType):
    """Delta between two plain path-string lists."""

    additions: List[str] = field(default_factory=list)
    removals: List[str] = field(default_factory=list)

    @staticmethod
    def from_lists(old: List[str], new: List[str]) -> "PathListDelta":
        """Build the delta from *old* to *new*, preserving input order.

        Set lookups keep this O(n) instead of the quadratic `x in list`
        scan; the result is identical since entries are hashable strings.
        """
        old_entries = set(old)
        new_entries = set(new)
        return PathListDelta(
            [x for x in new if x not in old_entries],
            [x for x in old if x not in new_entries],
        )
@dataclass
class Delta:
    """Aggregated plot-sync delta across all tracked categories."""

    valid: PlotListDelta = field(default_factory=PlotListDelta)
    invalid: PathListDelta = field(default_factory=PathListDelta)
    keys_missing: PathListDelta = field(default_factory=PathListDelta)
    duplicates: PathListDelta = field(default_factory=PathListDelta)

    def empty(self) -> bool:
        """True when every category delta is empty."""
        categories = (self.valid, self.invalid, self.keys_missing, self.duplicates)
        return all(category.empty() for category in categories)

    def __str__(self) -> str:
        return (
            f"[valid {self.valid}, invalid {self.invalid}, keys missing: {self.keys_missing}, "
            f"duplicates: {self.duplicates}]"
        )

    def clear(self) -> None:
        """Clear every category delta."""
        for category in (self.valid, self.invalid, self.keys_missing, self.duplicates):
            category.clear()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/plot_sync/__init__.py | flax/plot_sync/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/timelord.py | flax/timelord/timelord.py | from __future__ import annotations
import asyncio
import dataclasses
import io
import logging
import multiprocessing
import os
import random
import time
import traceback
from concurrent.futures import ProcessPoolExecutor
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
from chiavdf import create_discriminant, prove
import flax.server.ws_connection as ws
from flax.consensus.constants import ConsensusConstants
from flax.consensus.pot_iterations import calculate_sp_iters, is_overflow_block
from flax.protocols import timelord_protocol
from flax.protocols.protocol_message_types import ProtocolMessageTypes
from flax.rpc.rpc_server import default_get_connections
from flax.server.outbound_message import NodeType, make_msg
from flax.server.server import FlaxServer
from flax.timelord.iters_from_block import iters_from_block
from flax.timelord.timelord_state import LastState
from flax.timelord.types import Chain, IterationType, StateType
from flax.types.blockchain_format.classgroup import ClassgroupElement
from flax.types.blockchain_format.reward_chain_block import RewardChainBlock
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.slots import (
ChallengeChainSubSlot,
InfusedChallengeChainSubSlot,
RewardChainSubSlot,
SubSlotProofs,
)
from flax.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from flax.types.blockchain_format.vdf import VDFInfo, VDFProof
from flax.types.end_of_slot_bundle import EndOfSubSlotBundle
from flax.util.config import process_config_start_method
from flax.util.ints import uint8, uint16, uint32, uint64, uint128
from flax.util.setproctitle import getproctitle, setproctitle
from flax.util.streamable import Streamable, streamable
log = logging.getLogger(__name__)
@streamable
@dataclasses.dataclass(frozen=True)
class BlueboxProcessData(Streamable):
    """Serialized work item handed to a bluebox worker process."""

    challenge: bytes32  # challenge used to derive the discriminant
    size_bits: uint16  # discriminant size in bits
    iters: uint64  # number of VDF iterations to prove
def prove_bluebox_slow(payload: bytes):
    """Compute a compact VDF proof in-process (Windows / slow-bluebox fallback).

    *payload* is a serialized `BlueboxProcessData`; returns whatever
    chiavdf's `prove` produces for the given challenge and iterations.
    """
    bluebox_process_data = BlueboxProcessData.from_bytes(payload)
    # 100-byte initial form: 0x08 followed by 99 zero bytes.
    initial_el = b"\x08" + (b"\x00" * 99)
    return prove(
        bluebox_process_data.challenge,
        initial_el,
        bluebox_process_data.size_bits,
        bluebox_process_data.iters,
    )
class Timelord:
@property
def server(self) -> FlaxServer:
    """The attached FlaxServer; raises RuntimeError before `set_server` ran."""
    # This is a stop gap until the class usage is refactored such the values of
    # integral attributes are known at creation of the instance.
    server = self._server
    if server is None:
        raise RuntimeError("server not assigned")
    return server
def __init__(self, root_path, config: Dict, constants: ConsensusConstants):
    """Set up all timelord bookkeeping; no I/O happens until `_start`."""
    self.config = config
    self.root_path = root_path
    self.constants = constants
    self._shut_down = False
    # vdf_client connections accepted but not yet assigned to a chain.
    self.free_clients: List[Tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = []
    self.ip_whitelist = self.config["vdf_clients"]["ip"]
    self._server: Optional[FlaxServer] = None
    # Active vdf_client stream per chain.
    self.chain_type_to_stream: Dict[Chain, Tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = {}
    self.chain_start_time: Dict = {}
    # Chains that currently don't have a vdf_client.
    self.unspawned_chains: List[Chain] = [
        Chain.CHALLENGE_CHAIN,
        Chain.REWARD_CHAIN,
        Chain.INFUSED_CHALLENGE_CHAIN,
    ]
    # Chains that currently accept iterations.
    self.allows_iters: List[Chain] = []
    # Last peak received, None if it's already processed.
    self.new_peak: Optional[timelord_protocol.NewPeakTimelord] = None
    # Last end of subslot bundle, None if we built a peak on top of it.
    self.new_subslot_end: Optional[EndOfSubSlotBundle] = None
    # Last state received. Can either be a new peak or a new EndOfSubslotBundle.
    # Unfinished block info, iters adjusted to the last peak.
    self.unfinished_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = []
    # Signage points iters, adjusted to the last peak.
    self.signage_point_iters: List[Tuple[uint64, uint8]] = []
    # For each chain, send those info when the process spawns.
    self.iters_to_submit: Dict[Chain, List[uint64]] = {}
    self.iters_submitted: Dict[Chain, List[uint64]] = {}
    self.iters_finished: Set = set()
    # For each iteration submitted, know if it's a signage point, an infusion point or an end of slot.
    self.iteration_to_proof_type: Dict[uint64, IterationType] = {}
    # List of proofs finished.
    self.proofs_finished: List[Tuple[Chain, VDFInfo, VDFProof, int]] = []
    # Data to send at vdf_client initialization.
    self.overflow_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = []
    # Incremented each time `reset_chains` has been called.
    # Used to label proofs in `finished_proofs` and to only filter proofs corresponding to the most recent state.
    self.num_resets: int = 0

    multiprocessing_start_method = process_config_start_method(config=self.config, log=log)
    self.multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
    self.process_communication_tasks: List[asyncio.Task] = []
    self.main_loop = None
    self.vdf_server = None
    self._shut_down = False
    # Failure bookkeeping for vdf_client restarts.
    self.vdf_failures: List[Tuple[Chain, Optional[int]]] = []
    self.vdf_failures_count: int = 0
    self.vdf_failure_time: float = 0
    # Counters used to report the infusion rate.
    self.total_unfinished: int = 0
    self.total_infused: int = 0
    self.state_changed_callback: Optional[Callable] = None
    self.bluebox_mode = self.config.get("bluebox_mode", False)
    # Support backwards compatibility for the old `config.yaml` that has field `sanitizer_mode`.
    if not self.bluebox_mode:
        self.bluebox_mode = self.config.get("sanitizer_mode", False)
    self.pending_bluebox_info: List[Tuple[float, timelord_protocol.RequestCompactProofOfTime]] = []
    self.last_active_time = time.time()
    self.bluebox_pool: Optional[ProcessPoolExecutor] = None
async def _start(self):
    """Start the vdf_server and the main loop (or the bluebox queue manager)."""
    self.lock: asyncio.Lock = asyncio.Lock()
    self.vdf_server = await asyncio.start_server(
        self._handle_client,
        self.config["vdf_server"]["host"],
        int(self.config["vdf_server"]["port"]),
    )
    self.last_state: LastState = LastState(self.constants)
    slow_bluebox = self.config.get("slow_bluebox", False)
    if not self.bluebox_mode:
        # Normal timelord operation.
        self.main_loop = asyncio.create_task(self._manage_chains())
    else:
        if os.name == "nt" or slow_bluebox:
            # `vdf_client` doesn't build on windows, use `prove()` from chiavdf.
            workers = self.config.get("slow_bluebox_process_count", 1)
            self.bluebox_pool = ProcessPoolExecutor(
                max_workers=workers,
                mp_context=self.multiprocessing_context,
                initializer=setproctitle,
                initargs=(f"{getproctitle()}_worker",),
            )
            self.main_loop = asyncio.create_task(
                self._start_manage_discriminant_queue_sanitizer_slow(self.bluebox_pool, workers)
            )
        else:
            self.main_loop = asyncio.create_task(self._manage_discriminant_queue_sanitizer())
    log.info(f"Started timelord, listening on port {self.get_vdf_server_port()}")
def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
    """Expose peer connection info via the shared RPC helper, optionally filtered by node type."""
    return default_get_connections(server=self.server, request_node_type=request_node_type)
async def on_connect(self, connection: ws.WSFlaxConnection):
    # The timelord keeps no per-peer state; nothing to do on connect.
    pass
def get_vdf_server_port(self) -> Optional[uint16]:
    """Return the locally bound vdf_server port, or None before `_start`."""
    server = self.vdf_server
    if server is None:
        return None
    return server.sockets[0].getsockname()[1]
def _close(self):
self._shut_down = True
for task in self.process_communication_tasks:
task.cancel()
if self.main_loop is not None:
self.main_loop.cancel()
if self.bluebox_pool is not None:
self.bluebox_pool.shutdown()
async def _await_closed(self):
    # Nothing to wait for: cleanup is handled synchronously in `_close`.
    pass
def _set_state_changed_callback(self, callback: Callable):
    # Invoked later by `state_changed` whenever a notable event occurs.
    self.state_changed_callback = callback
def state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None):
    """Forward a state-change notification to the registered callback, if any."""
    callback = self.state_changed_callback
    if callback is None:
        return
    callback(change, change_data)
def set_server(self, server: FlaxServer):
    # Stored privately; read back through the `server` property.
    self._server = server
async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Accept an incoming vdf_client connection and pool it if the IP is whitelisted."""
    async with self.lock:
        client_ip = writer.get_extra_info("peername")[0]
        log.debug(f"New timelord connection from client: {client_ip}.")
        if client_ip not in self.ip_whitelist:
            return
        self.free_clients.append((client_ip, reader, writer))
        log.debug(f"Added new VDF client {client_ip}.")
async def _stop_chain(self, chain: Chain):
    """Stop the vdf_client serving *chain* and return the chain to the unspawned pool.

    All errors are logged and swallowed so a dead client cannot break the
    reset flow.
    """
    try:
        _, _, stop_writer = self.chain_type_to_stream[chain]
        if chain in self.allows_iters:
            # b"010" is the stop command sent to the vdf_client process.
            stop_writer.write(b"010")
            await stop_writer.drain()
            self.allows_iters.remove(chain)
        else:
            log.error(f"Trying to stop {chain} before its initialization.")
        stop_writer.close()
        await stop_writer.wait_closed()
        if chain not in self.unspawned_chains:
            self.unspawned_chains.append(chain)
        del self.chain_type_to_stream[chain]
    except ConnectionResetError as e:
        log.error(f"{e}")
    except Exception as e:
        log.error(f"Exception in stop chain: {type(e)} {e}")
def _can_infuse_unfinished_block(self, block: timelord_protocol.NewUnfinishedBlockTimelord) -> Optional[uint64]:
    """Return the iterations left until *block*'s infusion point, or None if it cannot be infused."""
    assert self.last_state is not None
    sub_slot_iters = self.last_state.get_sub_slot_iters()
    difficulty = self.last_state.get_difficulty()
    ip_iters = self.last_state.get_last_ip()
    rc_block = block.reward_chain_block
    try:
        block_sp_iters, block_ip_iters = iters_from_block(
            self.constants,
            rc_block,
            sub_slot_iters,
            difficulty,
        )
    except Exception as e:
        log.warning(f"Received invalid unfinished block: {e}.")
        return None
    # Total iterations (from chain start) at this block's signage point.
    block_sp_total_iters = self.last_state.total_iters - ip_iters + block_sp_iters
    if is_overflow_block(self.constants, block.reward_chain_block.signage_point_index):
        # Overflow blocks have their SP in the previous sub slot.
        block_sp_total_iters -= self.last_state.get_sub_slot_iters()
    # The block's previous reward chain challenge must be in our cache.
    found_index = -1
    for index, (rc, total_iters) in enumerate(self.last_state.reward_challenge_cache):
        if rc == block.rc_prev:
            found_index = index
            break
    if found_index == -1:
        log.warning(f"Will not infuse {block.rc_prev} because its reward chain challenge is not in the chain")
        return None
    if ip_iters > block_ip_iters:
        log.warning("Too late to infuse block")
        return None
    new_block_iters = uint64(block_ip_iters - ip_iters)
    if len(self.last_state.reward_challenge_cache) > found_index + 1:
        # There is another infusion after the matched challenge; it must not
        # come before this block's signage point.
        if self.last_state.reward_challenge_cache[found_index + 1][1] < block_sp_total_iters:
            log.warning(
                f"Will not infuse unfinished block {block.rc_prev} sp total iters {block_sp_total_iters}, "
                f"because there is another infusion before its SP"
            )
            return None
    if self.last_state.reward_challenge_cache[found_index][1] > block_sp_total_iters:
        if not is_overflow_block(self.constants, block.reward_chain_block.signage_point_index):
            log.error(
                f"Will not infuse unfinished block {block.rc_prev}, sp total iters: {block_sp_total_iters}, "
                f"because its iters are too low"
            )
            return None
    if new_block_iters > 0:
        return new_block_iters
    return None
async def _reset_chains(self, first_run=False, only_eos=False):
    """Stop all chains and rebuild the iteration submission schedule from `last_state`.

    With *only_eos* set, only the end-of-subslot iteration is scheduled
    (no infusion points or signage points).
    """
    # First, stop all chains.
    self.last_active_time = time.time()
    log.debug("Resetting chains")
    ip_iters = self.last_state.get_last_ip()
    sub_slot_iters = self.last_state.get_sub_slot_iters()
    if not first_run:
        for chain in list(self.chain_type_to_stream.keys()):
            await self._stop_chain(chain)
    # Adjust all signage points iterations to the peak.
    iters_per_signage = uint64(sub_slot_iters // self.constants.NUM_SPS_SUB_SLOT)
    self.signage_point_iters = [
        (k * iters_per_signage - ip_iters, k)
        for k in range(1, self.constants.NUM_SPS_SUB_SLOT)
        if k * iters_per_signage - ip_iters > 0
    ]
    for sp, k in self.signage_point_iters:
        assert k * iters_per_signage > 0
        assert k * iters_per_signage < sub_slot_iters
    # Adjust all unfinished blocks iterations to the peak.
    new_unfinished_blocks = []
    self.iters_finished = set()
    self.proofs_finished = []
    # Bump the reset label so stale proofs are filtered out later.
    self.num_resets += 1
    for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN, Chain.INFUSED_CHALLENGE_CHAIN]:
        self.iters_to_submit[chain] = []
        self.iters_submitted[chain] = []
    self.iteration_to_proof_type = {}
    if not only_eos:
        for block in self.unfinished_blocks + self.overflow_blocks:
            new_block_iters: Optional[uint64] = self._can_infuse_unfinished_block(block)
            # Does not add duplicates, or blocks that we cannot infuse
            if new_block_iters and new_block_iters not in self.iters_to_submit[Chain.CHALLENGE_CHAIN]:
                if block not in self.unfinished_blocks:
                    self.total_unfinished += 1
                new_unfinished_blocks.append(block)
                for chain in [Chain.REWARD_CHAIN, Chain.CHALLENGE_CHAIN]:
                    self.iters_to_submit[chain].append(new_block_iters)
                if self.last_state.get_deficit() < self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                    self.iters_to_submit[Chain.INFUSED_CHALLENGE_CHAIN].append(new_block_iters)
                self.iteration_to_proof_type[new_block_iters] = IterationType.INFUSION_POINT
        # Remove all unfinished blocks that have already passed.
        self.unfinished_blocks = new_unfinished_blocks
    # Signage points: schedule at most the next three.
    if not only_eos and len(self.signage_point_iters) > 0:
        count_signage = 0
        for signage, k in self.signage_point_iters:
            for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN]:
                self.iters_to_submit[chain].append(signage)
            self.iteration_to_proof_type[signage] = IterationType.SIGNAGE_POINT
            count_signage += 1
            if count_signage == 3:
                break
    # Always schedule the end of the current sub slot.
    left_subslot_iters = sub_slot_iters - ip_iters
    assert left_subslot_iters > 0
    if self.last_state.get_deficit() < self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
        self.iters_to_submit[Chain.INFUSED_CHALLENGE_CHAIN].append(left_subslot_iters)
    self.iters_to_submit[Chain.CHALLENGE_CHAIN].append(left_subslot_iters)
    self.iters_to_submit[Chain.REWARD_CHAIN].append(left_subslot_iters)
    self.iteration_to_proof_type[left_subslot_iters] = IterationType.END_OF_SUBSLOT
    for chain, iters in self.iters_to_submit.items():
        for iteration in iters:
            assert iteration > 0
async def _handle_new_peak(self):
    """Apply a received peak: update state, drop the now-infused unfinished block, reset chains."""
    assert self.new_peak is not None
    self.last_state.set_state(self.new_peak)
    if self.total_unfinished > 0:
        remove_unfinished = []
        # Find the unfinished block(s) matching the newly infused peak.
        for unf_block_timelord in self.unfinished_blocks + self.overflow_blocks:
            if (
                unf_block_timelord.reward_chain_block.get_hash()
                == self.new_peak.reward_chain_block.get_unfinished().get_hash()
            ):
                if unf_block_timelord not in self.unfinished_blocks:
                    # We never got the EOS for this, but we have the block in overflow list
                    self.total_unfinished += 1
                remove_unfinished.append(unf_block_timelord)
        if len(remove_unfinished) > 0:
            self.total_infused += 1
        for block in remove_unfinished:
            if block in self.unfinished_blocks:
                self.unfinished_blocks.remove(block)
            if block in self.overflow_blocks:
                self.overflow_blocks.remove(block)
        # total_unfinished > 0 guards the division here.
        infusion_rate = round(self.total_infused / self.total_unfinished * 100.0, 2)
        log.info(
            f"Total unfinished blocks: {self.total_unfinished}. "
            f"Total infused blocks: {self.total_infused}. "
            f"Infusion rate: {infusion_rate}%."
        )
    self.new_peak = None
    await self._reset_chains()
async def _handle_subslot_end(self):
self.last_state.set_state(self.new_subslot_end)
for block in self.unfinished_blocks:
if self._can_infuse_unfinished_block(block) is not None:
self.total_unfinished += 1
self.new_subslot_end = None
await self._reset_chains()
async def _map_chains_with_vdf_clients(self):
    """Assign pooled vdf_client connections to chains that have a challenge ready."""
    while not self._shut_down:
        picked_chain = None
        async with self.lock:
            if len(self.free_clients) == 0:
                break
            ip, reader, writer = self.free_clients[0]
            # Look for an unspawned chain whose state provides both a
            # challenge and an initial form.
            for chain_type in self.unspawned_chains:
                challenge = self.last_state.get_challenge(chain_type)
                initial_form = self.last_state.get_initial_form(chain_type)
                if challenge is not None and initial_form is not None:
                    picked_chain = chain_type
                    break
            if picked_chain is None:
                break
            # NOTE(review): this overwrites the matched chain with the first
            # unspawned one; it assumes the loop matched unspawned_chains[0],
            # otherwise challenge/initial_form belong to a different chain —
            # verify against upstream behavior.
            picked_chain = self.unspawned_chains[0]
            self.chain_type_to_stream[picked_chain] = (ip, reader, writer)
            self.free_clients = self.free_clients[1:]
            self.unspawned_chains = self.unspawned_chains[1:]
            self.chain_start_time[picked_chain] = time.time()
        log.debug(f"Mapping free vdf_client with chain: {picked_chain}.")
        self.process_communication_tasks.append(
            asyncio.create_task(
                self._do_process_communication(
                    picked_chain, challenge, initial_form, ip, reader, writer, proof_label=self.num_resets
                )
            )
        )
async def _submit_iterations(self):
    """Send all not-yet-submitted iteration targets to the running vdf_clients.

    Wire format per iteration: a two-character, zero-padded digit-count
    prefix followed by the decimal iteration value.
    """
    for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN, Chain.INFUSED_CHALLENGE_CHAIN]:
        if chain in self.allows_iters:
            _, _, writer = self.chain_type_to_stream[chain]
            for iteration in self.iters_to_submit[chain]:
                if iteration in self.iters_submitted[chain]:
                    continue
                log.debug(f"Submitting iterations to {chain}: {iteration}")
                assert iteration > 0
                # Prefix = number of decimal digits, left-padded to 2 chars.
                prefix = str(len(str(iteration)))
                if len(str(iteration)) < 10:
                    prefix = "0" + prefix
                iter_str = prefix + str(iteration)
                writer.write(iter_str.encode())
                await writer.drain()
                self.iters_submitted[chain].append(iteration)
def _clear_proof_list(self, iters: uint64):
    """Return `proofs_finished` without entries whose VDF ran for *iters* iterations."""
    kept = []
    for chain, info, proof, label in self.proofs_finished:
        if info.number_of_iterations == iters:
            continue
        kept.append((chain, info, proof, label))
    return kept
async def _check_for_new_sp(self, iter_to_look_for: uint64):
    """If proofs for the signage point at *iter_to_look_for* are complete, broadcast it.

    Handles at most one signage point per call, then schedules the next
    three signage point iterations on the chains.
    """
    signage_iters = [
        iteration for iteration, t in self.iteration_to_proof_type.items() if t == IterationType.SIGNAGE_POINT
    ]
    if len(signage_iters) == 0:
        return None
    to_remove = []
    for potential_sp_iters, signage_point_index in self.signage_point_iters:
        if potential_sp_iters not in signage_iters or potential_sp_iters != iter_to_look_for:
            continue
        signage_iter = potential_sp_iters
        # Only proofs labeled with the current reset count are valid.
        proofs_with_iter = [
            (chain, info, proof)
            for chain, info, proof, label in self.proofs_finished
            if info.number_of_iterations == signage_iter and label == self.num_resets
        ]
        # Wait for both cc and rc to have the signage point.
        if len(proofs_with_iter) == 2:
            cc_info: Optional[VDFInfo] = None
            cc_proof: Optional[VDFProof] = None
            rc_info: Optional[VDFInfo] = None
            rc_proof: Optional[VDFProof] = None
            for chain, info, proof in proofs_with_iter:
                if chain == Chain.CHALLENGE_CHAIN:
                    cc_info = info
                    cc_proof = proof
                if chain == Chain.REWARD_CHAIN:
                    rc_info = info
                    rc_proof = proof
            if cc_info is None or cc_proof is None or rc_info is None or rc_proof is None:
                log.error(f"Insufficient signage point data {signage_iter}")
                continue
            self.iters_finished.add(iter_to_look_for)
            self.last_active_time = time.time()
            rc_challenge = self.last_state.get_challenge(Chain.REWARD_CHAIN)
            if rc_info.challenge != rc_challenge:
                assert rc_challenge is not None
                log.warning(f"SP: Do not have correct challenge {rc_challenge.hex()}" f" has {rc_info.challenge}")
                # This proof is on an outdated challenge, so don't use it
                continue
            # The VDF info carries iterations relative to the last ip; the
            # broadcast value must be relative to the sub slot start.
            iters_from_sub_slot_start = cc_info.number_of_iterations + self.last_state.get_last_ip()
            response = timelord_protocol.NewSignagePointVDF(
                signage_point_index,
                dataclasses.replace(cc_info, number_of_iterations=iters_from_sub_slot_start),
                cc_proof,
                rc_info,
                rc_proof,
            )
            if self._server is not None:
                msg = make_msg(ProtocolMessageTypes.new_signage_point_vdf, response)
                await self.server.send_to_all([msg], NodeType.FULL_NODE)
            # Cleanup the signage point from memory.
            to_remove.append((signage_iter, signage_point_index))
            self.proofs_finished = self._clear_proof_list(signage_iter)
            # Send the next 3 signage point to the chains.
            next_iters_count = 0
            for next_sp, k in self.signage_point_iters:
                for chain in [Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN]:
                    if next_sp not in self.iters_submitted[chain] and next_sp not in self.iters_to_submit[chain]:
                        self.iters_to_submit[chain].append(next_sp)
                self.iteration_to_proof_type[next_sp] = IterationType.SIGNAGE_POINT
                next_iters_count += 1
                if next_iters_count == 3:
                    break
        # Break so we alternate between checking SP and IP
        break
    for r in to_remove:
        self.signage_point_iters.remove(r)
async def _check_for_new_ip(self, iter_to_look_for: uint64):
if len(self.unfinished_blocks) == 0:
return None
infusion_iters = [
iteration for iteration, t in self.iteration_to_proof_type.items() if t == IterationType.INFUSION_POINT
]
for iteration in infusion_iters:
if iteration != iter_to_look_for:
continue
proofs_with_iter = [
(chain, info, proof)
for chain, info, proof, label in self.proofs_finished
if info.number_of_iterations == iteration and label == self.num_resets
]
if self.last_state.get_challenge(Chain.INFUSED_CHALLENGE_CHAIN) is not None:
chain_count = 3
else:
chain_count = 2
if len(proofs_with_iter) == chain_count:
block = None
ip_iters = None
for unfinished_block in self.unfinished_blocks:
try:
_, ip_iters = iters_from_block(
self.constants,
unfinished_block.reward_chain_block,
self.last_state.get_sub_slot_iters(),
self.last_state.get_difficulty(),
)
except Exception as e:
log.error(f"Error {e}")
continue
if ip_iters - self.last_state.get_last_ip() == iteration:
block = unfinished_block
break
assert ip_iters is not None
if block is not None:
ip_total_iters = self.last_state.get_total_iters() + iteration
challenge = block.reward_chain_block.get_hash()
icc_info: Optional[VDFInfo] = None
icc_proof: Optional[VDFProof] = None
cc_info: Optional[VDFInfo] = None
cc_proof: Optional[VDFProof] = None
rc_info: Optional[VDFInfo] = None
rc_proof: Optional[VDFProof] = None
for chain, info, proof in proofs_with_iter:
if chain == Chain.CHALLENGE_CHAIN:
cc_info = info
cc_proof = proof
if chain == Chain.REWARD_CHAIN:
rc_info = info
rc_proof = proof
if chain == Chain.INFUSED_CHALLENGE_CHAIN:
icc_info = info
icc_proof = proof
if cc_info is None or cc_proof is None or rc_info is None or rc_proof is None:
log.error(f"Insufficient VDF proofs for infusion point ch: {challenge} iterations:{iteration}")
return None
rc_challenge = self.last_state.get_challenge(Chain.REWARD_CHAIN)
if rc_info.challenge != rc_challenge:
assert rc_challenge is not None
log.warning(
f"Do not have correct challenge {rc_challenge.hex()} "
f"has {rc_info.challenge}, partial hash {block.reward_chain_block.get_hash()}"
)
# This proof is on an outdated challenge, so don't use it
continue
self.iters_finished.add(iter_to_look_for)
self.last_active_time = time.time()
log.debug(f"Generated infusion point for challenge: {challenge} iterations: {iteration}.")
overflow = is_overflow_block(self.constants, block.reward_chain_block.signage_point_index)
if not self.last_state.can_infuse_block(overflow):
log.warning("Too many blocks, or overflow in new epoch, cannot infuse, discarding")
return None
cc_info = dataclasses.replace(cc_info, number_of_iterations=ip_iters)
response = timelord_protocol.NewInfusionPointVDF(
challenge,
cc_info,
cc_proof,
rc_info,
rc_proof,
icc_info,
icc_proof,
)
msg = make_msg(ProtocolMessageTypes.new_infusion_point_vdf, response)
if self._server is not None:
await self.server.send_to_all([msg], NodeType.FULL_NODE)
self.proofs_finished = self._clear_proof_list(iteration)
if (
self.last_state.get_last_block_total_iters() is None
and not self.last_state.state_type == StateType.FIRST_SUB_SLOT
):
# We don't know when the last block was, so we can't make peaks
return None
sp_total_iters = (
ip_total_iters
- ip_iters
+ calculate_sp_iters(
self.constants,
block.sub_slot_iters,
block.reward_chain_block.signage_point_index,
)
- (block.sub_slot_iters if overflow else 0)
)
if self.last_state.state_type == StateType.FIRST_SUB_SLOT:
is_transaction_block = True
height: uint32 = uint32(0)
else:
last_block_ti = self.last_state.get_last_block_total_iters()
assert last_block_ti is not None
is_transaction_block = last_block_ti < sp_total_iters
height = uint32(self.last_state.get_height() + 1)
if height < 5:
# Don't directly update our state for the first few blocks, because we cannot validate
# whether the pre-farm is correct
return None
new_reward_chain_block = RewardChainBlock(
uint128(self.last_state.get_weight() + block.difficulty),
height,
uint128(ip_total_iters),
block.reward_chain_block.signage_point_index,
block.reward_chain_block.pos_ss_cc_challenge_hash,
block.reward_chain_block.proof_of_space,
block.reward_chain_block.challenge_chain_sp_vdf,
block.reward_chain_block.challenge_chain_sp_signature,
cc_info,
block.reward_chain_block.reward_chain_sp_vdf,
block.reward_chain_block.reward_chain_sp_signature,
rc_info,
icc_info,
is_transaction_block,
)
if self.last_state.state_type == StateType.FIRST_SUB_SLOT:
# Genesis
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1
elif overflow and self.last_state.deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
if self.last_state.peak is not None:
assert self.last_state.subslot_end is None
# This means the previous block is also an overflow block, and did not manage
# to lower the deficit, therefore we cannot lower it either. (new slot)
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
else:
# This means we are the first infusion in this sub-slot. This may be a new slot or not.
assert self.last_state.subslot_end is not None
if self.last_state.subslot_end.infused_challenge_chain is None:
# There is no ICC, which means we are not finishing a slot. We can reduce the deficit.
new_deficit = self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1
else:
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/iters_from_block.py | flax/timelord/iters_from_block.py | from __future__ import annotations
from typing import Optional, Tuple, Union
from flax.consensus.pot_iterations import calculate_ip_iters, calculate_iterations_quality, calculate_sp_iters
from flax.types.blockchain_format.reward_chain_block import RewardChainBlock, RewardChainBlockUnfinished
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint64
def iters_from_block(
    constants,
    reward_chain_block: Union[RewardChainBlock, RewardChainBlockUnfinished],
    sub_slot_iters: uint64,
    difficulty: uint64,
) -> Tuple[uint64, uint64]:
    """Return (signage point iters, infusion point iters) for a reward chain block.

    Derives the challenge-chain signage point, verifies the proof of space to get
    its quality string, and converts that quality into the required iterations.
    Raises AssertionError if the proof of space is invalid.
    """
    # The signage point is either the output of the SP VDF, or (for index 0,
    # where there is no SP VDF) the sub-slot's challenge itself.
    if reward_chain_block.challenge_chain_sp_vdf is not None:
        cc_sp: bytes32 = reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
    else:
        assert reward_chain_block.signage_point_index == 0
        cc_sp = reward_chain_block.pos_ss_cc_challenge_hash
    quality_string: Optional[bytes32] = reward_chain_block.proof_of_space.verify_and_get_quality_string(
        constants,
        reward_chain_block.pos_ss_cc_challenge_hash,
        cc_sp,
    )
    assert quality_string is not None
    required_iters: uint64 = calculate_iterations_quality(
        constants.DIFFICULTY_CONSTANT_FACTOR,
        quality_string,
        reward_chain_block.proof_of_space.size,
        difficulty,
        cc_sp,
    )
    sp_iters = calculate_sp_iters(constants, sub_slot_iters, reward_chain_block.signage_point_index)
    ip_iters = calculate_ip_iters(
        constants,
        sub_slot_iters,
        reward_chain_block.signage_point_index,
        required_iters,
    )
    return sp_iters, ip_iters
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/timelord_state.py | flax/timelord/timelord_state.py | from __future__ import annotations
import logging
from typing import List, Optional, Tuple, Union
from flax.consensus.constants import ConsensusConstants
from flax.protocols import timelord_protocol
from flax.timelord.iters_from_block import iters_from_block
from flax.timelord.types import Chain, StateType
from flax.types.blockchain_format.classgroup import ClassgroupElement
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.slots import ChallengeBlockInfo
from flax.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from flax.types.end_of_slot_bundle import EndOfSubSlotBundle
from flax.util.ints import uint8, uint32, uint64, uint128
log = logging.getLogger(__name__)
class LastState:
    """
    Represents the state that the timelord is in, and should execute VDFs on top of. A state can be one of three types:
    1. A "peak" or a block
    2. An end of sub-slot
    3. None, if it's the first sub-slot and there are no blocks yet
    Timelords execute VDFs until they reach the next block or sub-slot, at which point the state is changed again.
    The state can also be changed arbitrarily to a sub-slot or peak, for example in the case the timelord receives
    a new block in the future.
    """
    def __init__(self, constants: ConsensusConstants):
        # Genesis defaults: no peak or sub-slot end yet, genesis challenge,
        # starting difficulty and sub-slot iterations from consensus constants.
        self.state_type: StateType = StateType.FIRST_SUB_SLOT
        self.peak: Optional[timelord_protocol.NewPeakTimelord] = None
        self.subslot_end: Optional[EndOfSubSlotBundle] = None
        self.last_ip: uint64 = uint64(0)
        self.deficit: uint8 = constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK
        self.sub_epoch_summary: Optional[SubEpochSummary] = None
        self.constants: ConsensusConstants = constants
        self.last_weight: uint128 = uint128(0)
        self.last_height: uint32 = uint32(0)
        self.total_iters: uint128 = uint128(0)
        # Total iters at the most recent challenge block or end of sub-slot.
        self.last_challenge_sb_or_eos_total_iters = uint128(0)
        self.last_block_total_iters: Optional[uint128] = None
        self.last_peak_challenge: bytes32 = constants.GENESIS_CHALLENGE
        self.difficulty: uint64 = constants.DIFFICULTY_STARTING
        self.sub_slot_iters: uint64 = constants.SUB_SLOT_ITERS_STARTING
        # Recently seen reward-chain challenges paired with the total iters they
        # were recorded at; used by can_infuse_block to cap blocks per sub-slot.
        self.reward_challenge_cache: List[Tuple[bytes32, uint128]] = [(constants.GENESIS_CHALLENGE, uint128(0))]
        self.new_epoch = False
        self.passed_ses_height_but_not_yet_included = False
        self.infused_ses = False
    def set_state(self, state: Union[timelord_protocol.NewPeakTimelord, EndOfSubSlotBundle]):
        """Replace the current state with a new peak or an end-of-sub-slot bundle.

        Updates all derived fields (iterations, difficulty, deficit, caches) and
        appends the new reward-chain challenge to the cache.
        """
        if isinstance(state, timelord_protocol.NewPeakTimelord):
            self.state_type = StateType.PEAK
            self.peak = state
            self.subslot_end = None
            # Only the infusion point iters are needed; SP iters are discarded.
            _, self.last_ip = iters_from_block(
                self.constants,
                state.reward_chain_block,
                state.sub_slot_iters,
                state.difficulty,
            )
            self.deficit = state.deficit
            self.sub_epoch_summary = state.sub_epoch_summary
            self.last_weight = state.reward_chain_block.weight
            self.last_height = state.reward_chain_block.height
            self.total_iters = state.reward_chain_block.total_iters
            self.last_peak_challenge = state.reward_chain_block.get_hash()
            self.difficulty = state.difficulty
            self.sub_slot_iters = state.sub_slot_iters
            if state.reward_chain_block.is_transaction_block:
                self.last_block_total_iters = self.total_iters
            self.reward_challenge_cache = state.previous_reward_challenges
            self.last_challenge_sb_or_eos_total_iters = self.peak.last_challenge_sb_or_eos_total_iters
            self.new_epoch = False
            # Crossing a sub-epoch boundary at the next height means a sub epoch
            # summary is now pending inclusion.
            if (self.peak.reward_chain_block.height + 1) % self.constants.SUB_EPOCH_BLOCKS == 0:
                self.passed_ses_height_but_not_yet_included = True
            else:
                self.passed_ses_height_but_not_yet_included = state.passes_ses_height_but_not_yet_included
        elif isinstance(state, EndOfSubSlotBundle):
            self.state_type = StateType.END_OF_SUB_SLOT
            # Advance total_iters to the start of the next sub-slot: from the
            # peak's infusion point if there was one, else a full sub-slot.
            if self.peak is not None:
                self.total_iters = uint128(self.total_iters - self.get_last_ip() + self.sub_slot_iters)
            else:
                self.total_iters = uint128(self.total_iters + self.sub_slot_iters)
            self.peak = None
            self.subslot_end = state
            self.last_ip = uint64(0)
            self.deficit = state.reward_chain.deficit
            # A new difficulty in the bundle marks the start of a new epoch.
            if state.challenge_chain.new_difficulty is not None:
                assert state.challenge_chain.new_sub_slot_iters is not None
                self.difficulty = state.challenge_chain.new_difficulty
                self.sub_slot_iters = state.challenge_chain.new_sub_slot_iters
                self.new_epoch = True
            else:
                self.new_epoch = False
            if state.challenge_chain.subepoch_summary_hash is not None:
                self.infused_ses = True
                self.passed_ses_height_but_not_yet_included = False
            else:
                self.infused_ses = False
                # Since we have a new sub slot which is not an end of subepoch,
                # we will use the last value that we saw for
                # passed_ses_height_but_not_yet_included
            self.last_challenge_sb_or_eos_total_iters = self.total_iters
        else:
            assert False
        reward_challenge: Optional[bytes32] = self.get_challenge(Chain.REWARD_CHAIN)
        assert reward_challenge is not None  # Reward chain always has VDFs
        self.reward_challenge_cache.append((reward_challenge, self.total_iters))
        log.info(f"Updated timelord peak to {reward_challenge}, total iters: {self.total_iters}")
        # Bound the cache size; oldest entries are evicted first.
        while len(self.reward_challenge_cache) > 2 * self.constants.MAX_SUB_SLOT_BLOCKS:
            self.reward_challenge_cache.pop(0)
    def get_sub_slot_iters(self) -> uint64:
        return self.sub_slot_iters
    def can_infuse_block(self, overflow: bool) -> bool:
        """Return True if a (possibly overflow) block may be infused in this sub-slot."""
        if overflow and self.new_epoch:
            # No overflows in new epoch
            return False
        if self.state_type == StateType.FIRST_SUB_SLOT or self.state_type == StateType.END_OF_SUB_SLOT:
            return True
        # Count how many blocks were already infused since this sub-slot began.
        ss_start_iters = self.get_total_iters() - self.get_last_ip()
        already_infused_count: int = 0
        for _, total_iters in self.reward_challenge_cache:
            if total_iters > ss_start_iters:
                already_infused_count += 1
        if already_infused_count >= self.constants.MAX_SUB_SLOT_BLOCKS:
            return False
        return True
    def get_weight(self) -> uint128:
        return self.last_weight
    def get_height(self) -> uint32:
        return self.last_height
    def get_total_iters(self) -> uint128:
        return self.total_iters
    def get_last_peak_challenge(self) -> Optional[bytes32]:
        return self.last_peak_challenge
    def get_difficulty(self) -> uint64:
        return self.difficulty
    def get_last_ip(self) -> uint64:
        return self.last_ip
    def get_deficit(self) -> uint8:
        return self.deficit
    def just_infused_sub_epoch_summary(self) -> bool:
        """
        Returns true if state is an end of sub-slot, and that end of sub-slot infused a sub epoch summary
        """
        return self.state_type == StateType.END_OF_SUB_SLOT and self.infused_ses
    def get_next_sub_epoch_summary(self) -> Optional[SubEpochSummary]:
        """Return the pending sub epoch summary if it should go into the next sub-slot."""
        if self.state_type == StateType.FIRST_SUB_SLOT or self.state_type == StateType.END_OF_SUB_SLOT:
            # Can only infuse SES after a peak (in an end of sub slot)
            return None
        assert self.peak is not None
        if self.passed_ses_height_but_not_yet_included and self.get_deficit() == 0:
            # This will mean we will include the ses in the next sub-slot
            return self.sub_epoch_summary
        return None
    def get_last_block_total_iters(self) -> Optional[uint128]:
        return self.last_block_total_iters
    def get_passed_ses_height_but_not_yet_included(self) -> bool:
        return self.passed_ses_height_but_not_yet_included
    def get_challenge(self, chain: Chain) -> Optional[bytes32]:
        """Return the challenge the given chain's VDF should run on, or None if
        that chain has no VDF in the current state."""
        if self.state_type == StateType.FIRST_SUB_SLOT:
            assert self.peak is None and self.subslot_end is None
            if chain == Chain.CHALLENGE_CHAIN:
                return self.constants.GENESIS_CHALLENGE
            elif chain == Chain.REWARD_CHAIN:
                return self.constants.GENESIS_CHALLENGE
            elif chain == Chain.INFUSED_CHALLENGE_CHAIN:
                return None
        elif self.state_type == StateType.PEAK:
            assert self.peak is not None
            reward_chain_block = self.peak.reward_chain_block
            if chain == Chain.CHALLENGE_CHAIN:
                return reward_chain_block.challenge_chain_ip_vdf.challenge
            elif chain == Chain.REWARD_CHAIN:
                return reward_chain_block.get_hash()
            elif chain == Chain.INFUSED_CHALLENGE_CHAIN:
                if reward_chain_block.infused_challenge_chain_ip_vdf is not None:
                    # ICC already running: continue from its challenge.
                    return reward_chain_block.infused_challenge_chain_ip_vdf.challenge
                elif self.peak.deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1:
                    # This peak is the challenge block: the ICC starts from its info hash.
                    return ChallengeBlockInfo(
                        reward_chain_block.proof_of_space,
                        reward_chain_block.challenge_chain_sp_vdf,
                        reward_chain_block.challenge_chain_sp_signature,
                        reward_chain_block.challenge_chain_ip_vdf,
                    ).get_hash()
                return None
        elif self.state_type == StateType.END_OF_SUB_SLOT:
            assert self.subslot_end is not None
            if chain == Chain.CHALLENGE_CHAIN:
                return self.subslot_end.challenge_chain.get_hash()
            elif chain == Chain.REWARD_CHAIN:
                return self.subslot_end.reward_chain.get_hash()
            elif chain == Chain.INFUSED_CHALLENGE_CHAIN:
                # ICC continues only while the deficit has not been fully reset.
                if self.subslot_end.reward_chain.deficit < self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                    assert self.subslot_end.infused_challenge_chain is not None
                    return self.subslot_end.infused_challenge_chain.get_hash()
                return None
        return None
    def get_initial_form(self, chain: Chain) -> Optional[ClassgroupElement]:
        """Return the classgroup element the given chain's VDF starts from, or
        None if that chain has no VDF in the current state."""
        if self.state_type == StateType.FIRST_SUB_SLOT:
            return ClassgroupElement.get_default_element()
        elif self.state_type == StateType.PEAK:
            assert self.peak is not None
            reward_chain_block = self.peak.reward_chain_block
            if chain == Chain.CHALLENGE_CHAIN:
                # CC continues from the peak's infusion point output.
                return reward_chain_block.challenge_chain_ip_vdf.output
            if chain == Chain.REWARD_CHAIN:
                # RC restarts from the default element at every infusion.
                return ClassgroupElement.get_default_element()
            if chain == Chain.INFUSED_CHALLENGE_CHAIN:
                if reward_chain_block.infused_challenge_chain_ip_vdf is not None:
                    return reward_chain_block.infused_challenge_chain_ip_vdf.output
                elif self.peak.deficit == self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - 1:
                    return ClassgroupElement.get_default_element()
                else:
                    return None
        elif self.state_type == StateType.END_OF_SUB_SLOT:
            if chain == Chain.CHALLENGE_CHAIN or chain == Chain.REWARD_CHAIN:
                return ClassgroupElement.get_default_element()
            if chain == Chain.INFUSED_CHALLENGE_CHAIN:
                assert self.subslot_end is not None
                if self.subslot_end.reward_chain.deficit < self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                    return ClassgroupElement.get_default_element()
                else:
                    return None
        return None
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/timelord_launcher.py | flax/timelord/timelord_launcher.py | import asyncio
import logging
import pathlib
import signal
import time
import os
from typing import Dict, List, Optional
import pkg_resources
from flax.util.flax_logging import initialize_logging
from flax.util.config import load_config
from flax.util.default_root import DEFAULT_ROOT_PATH
from flax.util.network import get_host_addr
from flax.util.setproctitle import setproctitle
# Handles of every spawned vdf_client subprocess; guarded by the asyncio.Lock
# passed through spawn_process/kill_processes.
active_processes: List = []
# Set to True by kill_processes(); stops the respawn loops in spawn_process.
stopped = False
log = logging.getLogger(__name__)
async def kill_processes(lock: asyncio.Lock):
    """Mark the launcher as stopped and kill every active vdf_client process."""
    global stopped
    global active_processes
    async with lock:
        stopped = True
        # Iterate over a snapshot; kill() does not mutate the list itself.
        for proc in list(active_processes):
            try:
                proc.kill()
            except ProcessLookupError:
                # Process already exited on its own.
                continue
def find_vdf_client() -> pathlib.Path:
    """Locate the vdf_client binary shipped with the chiavdf package.

    Raises FileNotFoundError if the binary is not present.
    """
    location = pkg_resources.get_distribution("chiavdf").location
    candidate = pathlib.Path(location) / "vdf_client"
    if not candidate.is_file():
        raise FileNotFoundError("can't find vdf_client binary")
    return candidate
async def spawn_process(host: str, port: int, counter: int, lock: asyncio.Lock, prefer_ipv6: Optional[bool]):
    """Keep one vdf_client subprocess running until the launcher is stopped.

    Respawns the client whenever it exits. stderr output is suppressed for
    roughly the first 10 seconds after launch to hide startup noise.

    Args:
        host: timelord hostname the client connects to.
        port: timelord port.
        counter: index of this client, used for logging and passed to the binary.
        lock: guards the shared active_processes list.
        prefer_ipv6: passed to get_host_addr when resolving the host.
    """
    global stopped
    global active_processes
    path_to_vdf_client = find_vdf_client()
    first_10_seconds = True
    start_time = time.time()
    while not stopped:
        try:
            dirname = path_to_vdf_client.parent
            basename = path_to_vdf_client.name
            resolved = get_host_addr(host, prefer_ipv6)
            proc = await asyncio.create_subprocess_shell(
                f"{basename} {resolved} {port} {counter}",
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                # Restrict PATH to the directory holding vdf_client.
                env={"PATH": os.fspath(dirname)},
            )
        except Exception as e:
            log.warning(f"Exception while spawning process {counter}: {(e)}")
            # Back off briefly so a persistent failure (e.g. DNS resolution
            # error) does not spin this loop at 100% CPU.
            await asyncio.sleep(0.5)
            continue
        async with lock:
            active_processes.append(proc)
        stdout, stderr = await proc.communicate()
        if stdout:
            log.info(f"VDF client {counter}: {stdout.decode().rstrip()}")
        if stderr:
            if first_10_seconds:
                # Swallow stderr during the grace period; once it has elapsed,
                # start logging from the next occurrence onward.
                if time.time() - start_time > 10:
                    first_10_seconds = False
            else:
                log.error(f"VDF client {counter}: {stderr.decode().rstrip()}")
        log.info(f"Process number {counter} ended.")
        async with lock:
            if proc in active_processes:
                active_processes.remove(proc)
        await asyncio.sleep(0.1)
async def spawn_all_processes(config: Dict, net_config: Dict, lock: asyncio.Lock):
    """Start config["process_count"] vdf_client keep-alive loops and wait on them."""
    await asyncio.sleep(5)
    # An explicit "host" in the timelord_launcher config overrides self_hostname.
    hostname = config["host"] if "host" in config else net_config["self_hostname"]
    port = config["port"]
    process_count = config["process_count"]
    if process_count == 0:
        log.info("Process_count set to 0, stopping TLauncher.")
        return
    prefer_ipv6 = net_config.get("prefer_ipv6")
    tasks = [spawn_process(hostname, port, idx, lock, prefer_ipv6) for idx in range(process_count)]
    await asyncio.gather(*tasks)
def signal_received(lock: asyncio.Lock):
    # Signal handlers must be synchronous; schedule the async cleanup instead.
    asyncio.create_task(kill_processes(lock))
async def async_main(config, net_config):
    """Install signal handlers and run the vdf_client spawn loops to completion."""
    loop = asyncio.get_running_loop()
    lock = asyncio.Lock()
    try:
        for sig in (signal.SIGINT, signal.SIGTERM):
            loop.add_signal_handler(sig, signal_received, lock)
    except NotImplementedError:
        # e.g. event loops on platforms without signal handler support.
        log.info("signal handlers unsupported")
    try:
        await spawn_all_processes(config, net_config, lock)
    finally:
        log.info("Launcher fully closed.")
def main():
    """Entry point: load config from the default root and run the launcher."""
    if os.name == "nt":
        # vdf_client is not built for Windows.
        log.info("Timelord launcher not supported on Windows.")
        return
    root_path = DEFAULT_ROOT_PATH
    setproctitle("flax_timelord_launcher")
    net_config = load_config(root_path, "config.yaml")
    config = net_config["timelord_launcher"]
    initialize_logging("TLauncher", config["logging"], root_path)
    asyncio.run(async_main(config=config, net_config=net_config))
# Allow running the launcher directly as a script.
if __name__ == "__main__":
    main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/__init__.py | flax/timelord/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/types.py | flax/timelord/types.py | from __future__ import annotations
from enum import Enum
# Identifiers for the VDF chains the timelord works on. Functional Enum API:
# members get auto-assigned values starting at 1, matching the original.
Chain = Enum("Chain", ["CHALLENGE_CHAIN", "REWARD_CHAIN", "INFUSED_CHALLENGE_CHAIN", "BLUEBOX"])
# What a submitted iteration count is for. Functional Enum API: auto values 1..3.
IterationType = Enum("IterationType", ["SIGNAGE_POINT", "INFUSION_POINT", "END_OF_SUBSLOT"])
# Which kind of state the timelord currently extends. Functional Enum API: auto values 1..3.
StateType = Enum("StateType", ["PEAK", "END_OF_SUB_SLOT", "FIRST_SUB_SLOT"])
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/timelord/timelord_api.py | flax/timelord/timelord_api.py | from __future__ import annotations
import logging
import time
from typing import Callable, Optional
from flax.protocols import timelord_protocol
from flax.timelord.timelord import Chain, IterationType, Timelord, iters_from_block
from flax.util.api_decorators import api_request
from flax.util.ints import uint64
log = logging.getLogger(__name__)
class TimelordAPI:
    """Handlers for peer protocol messages addressed to the timelord.

    All handlers mutate shared Timelord state and therefore run under
    self.timelord.lock. In bluebox mode the timelord only compactifies old
    proofs, so normal peak/unfinished-block messages are ignored (and vice
    versa for compact-proof requests).
    """
    timelord: Timelord
    def __init__(self, timelord) -> None:
        self.timelord = timelord
    def _set_state_changed_callback(self, callback: Callable):
        # Registered by the service/RPC layer to receive state_changed events.
        self.timelord.state_changed_callback = callback
    @api_request
    async def new_peak_timelord(self, new_peak: timelord_protocol.NewPeakTimelord):
        """Accept a new peak from the full node, unless we already have it."""
        if self.timelord.last_state is None:
            return None
        async with self.timelord.lock:
            if self.timelord.bluebox_mode:
                return None
            if new_peak.reward_chain_block.weight > self.timelord.last_state.get_weight():
                # Heavier than our current state: switch to it.
                log.info("Not skipping peak, don't have. Maybe we are not the fastest timelord")
                log.info(
                    f"New peak: height: {new_peak.reward_chain_block.height} weight: "
                    f"{new_peak.reward_chain_block.weight} "
                )
                self.timelord.new_peak = new_peak
                self.timelord.state_changed("new_peak", {"height": new_peak.reward_chain_block.height})
            elif (
                self.timelord.last_state.peak is not None
                and self.timelord.last_state.peak.reward_chain_block == new_peak.reward_chain_block
            ):
                # Same block we are already extending: nothing to do.
                log.info("Skipping peak, already have.")
                self.timelord.state_changed("skipping_peak", {"height": new_peak.reward_chain_block.height})
                return None
            else:
                # Not heavier, but a different block than ours: adopt it anyway.
                log.warning("block that we don't have, changing to it.")
                self.timelord.new_peak = new_peak
                self.timelord.state_changed("new_peak", {"height": new_peak.reward_chain_block.height})
                self.timelord.new_subslot_end = None
    @api_request
    async def new_unfinished_block_timelord(self, new_unfinished_block: timelord_protocol.NewUnfinishedBlockTimelord):
        """Queue an unfinished block so its infusion point VDFs get computed."""
        if self.timelord.last_state is None:
            return None
        async with self.timelord.lock:
            if self.timelord.bluebox_mode:
                return None
            try:
                sp_iters, ip_iters = iters_from_block(
                    self.timelord.constants,
                    new_unfinished_block.reward_chain_block,
                    self.timelord.last_state.get_sub_slot_iters(),
                    self.timelord.last_state.get_difficulty(),
                )
            except Exception:
                # Invalid proof of space / malformed block: drop silently.
                return None
            last_ip_iters = self.timelord.last_state.get_last_ip()
            if sp_iters > ip_iters:
                # Overflow block: its infusion lands in the next sub-slot.
                self.timelord.overflow_blocks.append(new_unfinished_block)
                log.debug(f"Overflow unfinished block, total {self.timelord.total_unfinished}")
            elif ip_iters > last_ip_iters:
                # Infusion point still ahead of us: schedule iters on each chain.
                new_block_iters: Optional[uint64] = self.timelord._can_infuse_unfinished_block(new_unfinished_block)
                if new_block_iters:
                    self.timelord.unfinished_blocks.append(new_unfinished_block)
                    for chain in [Chain.REWARD_CHAIN, Chain.CHALLENGE_CHAIN]:
                        self.timelord.iters_to_submit[chain].append(new_block_iters)
                    if self.timelord.last_state.get_deficit() < self.timelord.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
                        self.timelord.iters_to_submit[Chain.INFUSED_CHALLENGE_CHAIN].append(new_block_iters)
                    self.timelord.iteration_to_proof_type[new_block_iters] = IterationType.INFUSION_POINT
                    self.timelord.total_unfinished += 1
                    log.debug(f"Non-overflow unfinished block, total {self.timelord.total_unfinished}")
    @api_request
    async def request_compact_proof_of_time(self, vdf_info: timelord_protocol.RequestCompactProofOfTime):
        """Queue a compact-proof request (bluebox mode only)."""
        async with self.timelord.lock:
            if not self.timelord.bluebox_mode:
                return None
            now = time.time()
            # work older than 5s can safely be assumed to be from the previous batch, and needs to be cleared
            while self.timelord.pending_bluebox_info and (now - self.timelord.pending_bluebox_info[0][0] > 5):
                del self.timelord.pending_bluebox_info[0]
            self.timelord.pending_bluebox_info.append((now, vdf_info))
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/crawler_api.py | flax/seeder/crawler_api.py | from __future__ import annotations
from typing import Optional
import flax.server.ws_connection as ws
from flax.protocols import full_node_protocol, wallet_protocol
from flax.seeder.crawler import Crawler
from flax.server.outbound_message import Message
from flax.server.server import FlaxServer
from flax.util.api_decorators import api_request, peer_required
class CrawlerAPI:
    """Peer protocol surface of the crawler.

    The crawler only acts on new_peak messages; every other full-node or
    wallet protocol message is accepted and deliberately ignored (empty
    handler bodies) so peers treat us as a responsive node.
    """
    crawler: Crawler
    def __init__(self, crawler):
        self.crawler = crawler
    def __getattr__(self, attr_name: str):
        # Fallback for any message type without an explicit handler below:
        # hand back an async no-op so the message is silently dropped.
        async def invoke(*args, **kwargs):
            pass
        return invoke
    @property
    def server(self) -> FlaxServer:
        assert self.crawler.server is not None
        return self.crawler.server
    @property
    def log(self):
        return self.crawler.log
    @peer_required
    @api_request
    async def request_peers(self, _request: full_node_protocol.RequestPeers, peer: ws.WSFlaxConnection):
        pass
    @peer_required
    @api_request
    async def respond_peers(
        self, request: full_node_protocol.RespondPeers, peer: ws.WSFlaxConnection
    ) -> Optional[Message]:
        pass
    @peer_required
    @api_request
    async def new_peak(self, request: full_node_protocol.NewPeak, peer: ws.WSFlaxConnection) -> Optional[Message]:
        # The only message the crawler actually processes.
        await self.crawler.new_peak(request, peer)
        return None
    @api_request
    async def new_transaction(self, transaction: full_node_protocol.NewTransaction) -> Optional[Message]:
        pass
    @api_request
    @peer_required
    async def new_signage_point_or_end_of_sub_slot(
        self, new_sp: full_node_protocol.NewSignagePointOrEndOfSubSlot, peer: ws.WSFlaxConnection
    ) -> Optional[Message]:
        pass
    @api_request
    async def new_unfinished_block(
        self, new_unfinished_block: full_node_protocol.NewUnfinishedBlock
    ) -> Optional[Message]:
        pass
    @peer_required
    @api_request
    async def new_compact_vdf(self, request: full_node_protocol.NewCompactVDF, peer: ws.WSFlaxConnection):
        pass
    @api_request
    async def request_transaction(self, request: full_node_protocol.RequestTransaction) -> Optional[Message]:
        pass
    @api_request
    async def request_proof_of_weight(self, request: full_node_protocol.RequestProofOfWeight) -> Optional[Message]:
        pass
    @api_request
    async def request_block(self, request: full_node_protocol.RequestBlock) -> Optional[Message]:
        pass
    @api_request
    async def request_blocks(self, request: full_node_protocol.RequestBlocks) -> Optional[Message]:
        pass
    @api_request
    async def request_unfinished_block(
        self, request_unfinished_block: full_node_protocol.RequestUnfinishedBlock
    ) -> Optional[Message]:
        pass
    @api_request
    async def request_signage_point_or_end_of_sub_slot(
        self, request: full_node_protocol.RequestSignagePointOrEndOfSubSlot
    ) -> Optional[Message]:
        pass
    @peer_required
    @api_request
    async def request_mempool_transactions(
        self,
        request: full_node_protocol.RequestMempoolTransactions,
        peer: ws.WSFlaxConnection,
    ) -> Optional[Message]:
        pass
    @api_request
    async def request_block_header(self, request: wallet_protocol.RequestBlockHeader) -> Optional[Message]:
        pass
    @api_request
    async def request_additions(self, request: wallet_protocol.RequestAdditions) -> Optional[Message]:
        pass
    @api_request
    async def request_removals(self, request: wallet_protocol.RequestRemovals) -> Optional[Message]:
        pass
    @api_request
    async def request_puzzle_solution(self, request: wallet_protocol.RequestPuzzleSolution) -> Optional[Message]:
        pass
    @api_request
    async def request_header_blocks(self, request: wallet_protocol.RequestHeaderBlocks) -> Optional[Message]:
        pass
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/crawl_store.py | flax/seeder/crawl_store.py | import asyncio
import dataclasses
import ipaddress
import logging
import random
import time
from typing import Dict, List, Optional
import aiosqlite
from flax.seeder.peer_record import PeerRecord, PeerReliability
log = logging.getLogger(__name__)
class CrawlStore:
crawl_db: aiosqlite.Connection
last_timestamp: int
lock: asyncio.Lock
host_to_records: Dict
host_to_selected_time: Dict
host_to_reliability: Dict
banned_peers: int
ignored_peers: int
reliable_peers: int
@classmethod
async def create(cls, connection: aiosqlite.Connection):
self = cls()
self.crawl_db = connection
await self.crawl_db.execute(
(
"CREATE TABLE IF NOT EXISTS peer_records("
" peer_id text PRIMARY KEY,"
" ip_address text,"
" port bigint,"
" connected int,"
" last_try_timestamp bigint,"
" try_count bigint,"
" connected_timestamp bigint,"
" added_timestamp bigint,"
" best_timestamp bigint,"
" version text,"
" handshake_time text"
" tls_version text)"
)
)
await self.crawl_db.execute(
(
"CREATE TABLE IF NOT EXISTS peer_reliability("
" peer_id text PRIMARY KEY,"
" ignore_till int, ban_till int,"
" stat_2h_w real, stat_2h_c real, stat_2h_r real,"
" stat_8h_w real, stat_8h_c real, stat_8h_r real,"
" stat_1d_w real, stat_1d_c real, stat_1d_r real,"
" stat_1w_w real, stat_1w_c real, stat_1w_r real,"
" stat_1m_w real, stat_1m_c real, stat_1m_r real,"
" tries int, successes int)"
)
)
try:
await self.crawl_db.execute("ALTER TABLE peer_records ADD COLUMN tls_version text")
except aiosqlite.OperationalError:
pass # ignore what is likely Duplicate column error
await self.crawl_db.execute(("CREATE TABLE IF NOT EXISTS good_peers(ip text)"))
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS ip_address on peer_records(ip_address)")
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS port on peer_records(port)")
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS connected on peer_records(connected)")
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS added_timestamp on peer_records(added_timestamp)")
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS peer_id on peer_reliability(peer_id)")
await self.crawl_db.execute("CREATE INDEX IF NOT EXISTS ignore_till on peer_reliability(ignore_till)")
await self.crawl_db.commit()
self.last_timestamp = 0
self.ignored_peers = 0
self.banned_peers = 0
self.reliable_peers = 0
self.host_to_selected_time = {}
await self.unload_from_db()
return self
def maybe_add_peer(self, peer_record: PeerRecord, peer_reliability: PeerReliability):
if peer_record.peer_id not in self.host_to_records:
self.host_to_records[peer_record.peer_id] = peer_record
if peer_reliability.peer_id not in self.host_to_reliability:
self.host_to_reliability[peer_reliability.peer_id] = peer_reliability
    async def add_peer(self, peer_record: PeerRecord, peer_reliability: PeerReliability, save_db: bool = False):
        """Store a peer record and its reliability stats.

        With save_db=False (the default) only the in-memory maps are updated;
        with save_db=True the rows are upserted into the database instead
        (the in-memory maps are assumed to already hold them).
        """
        if not save_db:
            self.host_to_records[peer_record.peer_id] = peer_record
            self.host_to_reliability[peer_reliability.peer_id] = peer_reliability
            return
        added_timestamp = int(time.time())
        # 12 placeholders: must match the peer_records column order in create().
        cursor = await self.crawl_db.execute(
            "INSERT OR REPLACE INTO peer_records VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (
                peer_record.peer_id,
                peer_record.ip_address,
                peer_record.port,
                int(peer_record.connected),
                peer_record.last_try_timestamp,
                peer_record.try_count,
                peer_record.connected_timestamp,
                added_timestamp,
                peer_record.best_timestamp,
                peer_record.version,
                peer_record.handshake_time,
                peer_record.tls_version,
            ),
        )
        await cursor.close()
        # 20 placeholders: must match the peer_reliability column order in create().
        cursor = await self.crawl_db.execute(
            "INSERT OR REPLACE INTO peer_reliability"
            " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (
                peer_reliability.peer_id,
                peer_reliability.ignore_till,
                peer_reliability.ban_till,
                peer_reliability.stat_2h.weight,
                peer_reliability.stat_2h.count,
                peer_reliability.stat_2h.reliability,
                peer_reliability.stat_8h.weight,
                peer_reliability.stat_8h.count,
                peer_reliability.stat_8h.reliability,
                peer_reliability.stat_1d.weight,
                peer_reliability.stat_1d.count,
                peer_reliability.stat_1d.reliability,
                peer_reliability.stat_1w.weight,
                peer_reliability.stat_1w.count,
                peer_reliability.stat_1w.reliability,
                peer_reliability.stat_1m.weight,
                peer_reliability.stat_1m.count,
                peer_reliability.stat_1m.reliability,
                peer_reliability.tries,
                peer_reliability.successes,
            ),
        )
        await cursor.close()
async def get_peer_reliability(self, peer_id: str) -> PeerReliability:
return self.host_to_reliability[peer_id]
async def peer_failed_to_connect(self, peer: PeerRecord):
now = int(time.time())
age_timestamp = int(max(peer.last_try_timestamp, peer.connected_timestamp))
if age_timestamp == 0:
age_timestamp = now - 1000
replaced = dataclasses.replace(peer, try_count=peer.try_count + 1, last_try_timestamp=now)
reliability = await self.get_peer_reliability(peer.peer_id)
if reliability is None:
reliability = PeerReliability(peer.peer_id)
reliability.update(False, now - age_timestamp)
await self.add_peer(replaced, reliability)
async def peer_connected(self, peer: PeerRecord, tls_version: str):
now = int(time.time())
age_timestamp = int(max(peer.last_try_timestamp, peer.connected_timestamp))
if age_timestamp == 0:
age_timestamp = now - 1000
replaced = dataclasses.replace(peer, connected=True, connected_timestamp=now, tls_version=tls_version)
reliability = await self.get_peer_reliability(peer.peer_id)
if reliability is None:
reliability = PeerReliability(peer.peer_id)
reliability.update(True, now - age_timestamp)
await self.add_peer(replaced, reliability)
async def update_best_timestamp(self, host: str, timestamp):
if host not in self.host_to_records:
return
record = self.host_to_records[host]
replaced = dataclasses.replace(record, best_timestamp=timestamp)
if host not in self.host_to_reliability:
return
reliability = self.host_to_reliability[host]
await self.add_peer(replaced, reliability)
async def peer_connected_hostname(self, host: str, connected: bool = True, tls_version: str = "unknown"):
if host not in self.host_to_records:
return
record = self.host_to_records[host]
if connected:
await self.peer_connected(record, tls_version)
else:
await self.peer_failed_to_connect(record)
    async def get_peers_to_crawl(self, min_batch_size, max_batch_size) -> List[PeerRecord]:
        """Select a batch of peers that are due for a crawl attempt.

        Skips banned/ignored peers and peers selected in the last 2 minutes,
        applies a per-family retry delay (shorter for IPv6), and caps the batch
        size between min_batch_size and max_batch_size for each address family.
        Also refreshes the ignored/banned counters as a side effect.
        """
        now = int(time.time())
        records = []
        records_v6 = []
        counter = 0
        self.ignored_peers = 0
        self.banned_peers = 0
        for peer_id in self.host_to_reliability:
            add = False
            counter += 1
            reliability = self.host_to_reliability[peer_id]
            if reliability.ignore_till < now and reliability.ban_till < now:
                add = True
            else:
                if reliability.ban_till >= now:
                    self.banned_peers += 1
                elif reliability.ignore_till >= now:
                    self.ignored_peers += 1
            record = self.host_to_records[peer_id]
            if record.last_try_timestamp == 0 and record.connected_timestamp == 0:
                # Brand-new peer: always eligible, even if flagged above.
                add = True
            if peer_id in self.host_to_selected_time:
                last_selected = self.host_to_selected_time[peer_id]
                # Don't hand out the same peer twice within 2 minutes.
                if time.time() - last_selected < 120:
                    add = False
            if add:
                # Classify the peer id as IPv6 or not (peer_id is an address here).
                v6 = True
                try:
                    _ = ipaddress.IPv6Address(peer_id)
                except ValueError:
                    v6 = False
                # Retry IPv6 peers more aggressively than IPv4 ones.
                delta_time = 600 if v6 else 1000
                if now - record.last_try_timestamp >= delta_time and now - record.connected_timestamp >= delta_time:
                    if not v6:
                        records.append(record)
                    else:
                        records_v6.append(record)
        # Batch size scales with the candidate pool, clamped to the given bounds.
        batch_size = max(min_batch_size, len(records) // 10)
        batch_size = min(batch_size, max_batch_size)
        if len(records) > batch_size:
            random.shuffle(records)
            records = records[:batch_size]
        if len(records_v6) > batch_size:
            random.shuffle(records_v6)
            records_v6 = records_v6[:batch_size]
        records += records_v6
        # Remember when each peer was handed out, for the 2-minute cooldown above.
        for record in records:
            self.host_to_selected_time[record.peer_id] = time.time()
        return records
def get_ipv6_peers(self) -> int:
counter = 0
for peer_id in self.host_to_reliability:
v6 = True
try:
_ = ipaddress.IPv6Address(peer_id)
except ValueError:
v6 = False
if v6:
counter += 1
return counter
    def get_total_records(self) -> int:
        # Number of peers currently tracked in memory.
        return len(self.host_to_records)
    def get_ignored_peers(self) -> int:
        # Counter refreshed by the last get_peers_to_crawl() pass.
        return self.ignored_peers
    def get_banned_peers(self) -> int:
        # Counter refreshed by the last get_peers_to_crawl() pass.
        return self.banned_peers
    def get_reliable_peers(self) -> int:
        # Counter refreshed by the last load_reliable_peers_to_db() pass.
        return self.reliable_peers
async def load_to_db(self):
log.error("Saving peers to DB...")
for peer_id in list(self.host_to_reliability.keys()):
if peer_id in self.host_to_reliability and peer_id in self.host_to_records:
reliability = self.host_to_reliability[peer_id]
record = self.host_to_records[peer_id]
await self.add_peer(record, reliability, True)
await self.crawl_db.commit()
log.error(" - Done saving peers to DB")
async def unload_from_db(self):
    """Replace the in-memory peer maps with the rows stored in the database.

    NOTE: despite its name, this reads *from* the DB into memory.
    """
    self.host_to_records = {}
    self.host_to_reliability = {}

    log.error("Loading peer reliability records...")
    cursor = await self.crawl_db.execute(
        "SELECT * from peer_reliability",
    )
    reliability_rows = await cursor.fetchall()
    await cursor.close()
    for row in reliability_rows:
        # The first 20 columns map positionally onto the PeerReliability constructor;
        # column 0 is the peer id and doubles as the map key.
        self.host_to_reliability[row[0]] = PeerReliability(*row[:20])
    log.error(" - Done loading peer reliability records...")

    log.error("Loading peer records...")
    cursor = await self.crawl_db.execute(
        "SELECT * from peer_records",
    )
    record_rows = await cursor.fetchall()
    await cursor.close()
    for row in record_rows:
        # The first 12 columns map positionally onto the PeerRecord fields.
        self.host_to_records[row[0]] = PeerRecord(*row[:12])
    log.error(" - Done loading peer records...")
# Crawler -> DNS.
async def load_reliable_peers_to_db(self):
    """Rebuild the good_peers table from the peers currently judged reliable."""
    reliable = [host for host, rel in self.host_to_reliability.items() if rel.is_reliable()]
    self.reliable_peers = len(reliable)

    log.error("Deleting old good_peers from DB...")
    cursor = await self.crawl_db.execute(
        "DELETE from good_peers",
    )
    await cursor.close()
    log.error(" - Done deleting old good_peers...")

    log.error("Saving new good_peers to DB...")
    for host in reliable:
        cursor = await self.crawl_db.execute(
            "INSERT OR REPLACE INTO good_peers VALUES(?)",
            (host,),
        )
        await cursor.close()
    await self.crawl_db.commit()
    log.error(" - Done saving new good_peers to DB...")
def load_host_to_version(self):
    """Build the (versions, handshake) maps for peers seen in the last 5 days.

    Returns:
        Tuple of two dicts keyed by host: reported version string, and the
        handshake timestamp that version was observed at.

    Fix: the original re-checked `host not in self.host_to_records` while
    iterating `.items()` of that very dict (dead branch) and re-fetched the
    record it already had; both removed. The freshness cutoff is hoisted out
    of the loop.
    """
    cutoff = time.time() - 5 * 24 * 3600
    versions = {}
    handshake = {}
    for host, record in self.host_to_records.items():
        if record.version == "undefined":
            # Peer never completed a version handshake.
            continue
        if record.handshake_time < cutoff:
            # Stale handshake - drop it from the report.
            continue
        versions[host] = record.version
        handshake[host] = record.handshake_time
    return (versions, handshake)
def load_best_peer_reliability(self):
    """Return {host: best_timestamp} for peers gossiped within the last 5 days."""
    return {
        host: record.best_timestamp
        for host, record in self.host_to_records.items()
        if record.best_timestamp > time.time() - 5 * 24 * 3600
    }
async def update_version(self, host, version, now):
    """Record a freshly observed version string for ``host`` and re-submit the
    updated (record, reliability) pair through add_peer."""
    record = self.host_to_records.get(host)
    if record is None:
        return
    reliability = self.host_to_reliability.get(host)
    if reliability is None:
        return
    record.update_version(version, now)
    await self.add_peer(record, reliability)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/dns_server.py | flax/seeder/dns_server.py | import asyncio
import ipaddress
import logging
import random
import signal
import traceback
from pathlib import Path
from typing import Any, Dict, List
import aiosqlite
from dnslib import A, AAAA, SOA, NS, MX, CNAME, RR, DNSRecord, QTYPE, DNSHeader
from flax.util.flax_logging import initialize_logging
from flax.util.path import path_from_root
from flax.util.config import load_config
from flax.util.default_root import DEFAULT_ROOT_PATH
SERVICE_NAME = "seeder"
log = logging.getLogger(__name__)
# DNS snippet taken from: https://gist.github.com/pklaus/b5a7876d4d2cf7271873
class DomainName(str):
    """A str subclass where attribute access builds sub-domains:
    DomainName("example.com").ns1 == "ns1.example.com" (itself a DomainName)."""

    def __getattr__(self, label):
        # Only called for names str does not already define.
        return DomainName(f"{label}.{self}")
# Zone configuration. These module globals are populated from config.yaml by
# main() before the server starts handling queries.
D = None  # DomainName of the zone this server answers for
ns = None  # DomainName of the primary nameserver
IP = "127.0.0.1"  # address served for the zone's own A records (ns1/ns2/mail)
TTL = None  # TTL applied to every record in replies
soa_record = None  # SOA record, built in main() from config["soa"]
ns_records: List[Any] = []  # NS records, built in main()
class EchoServerProtocol(asyncio.DatagramProtocol):
    """UDP protocol that feeds each datagram to ``callback`` and sends back the
    callback's (bytes) result, via an internal queue drained by a long-lived task.

    NOTE(review): the responder task is spawned in __init__, so instances must be
    created while an event loop is running (true here: created inside
    ``loop.create_datagram_endpoint``).
    """

    def __init__(self, callback):
        # Queue of (response_bytes, caller_addr) pairs awaiting transmission.
        self.data_queue = asyncio.Queue()
        self.callback = callback
        asyncio.ensure_future(self.respond())

    def connection_made(self, transport):
        # Saved so respond() can send replies on the same socket.
        self.transport = transport

    def datagram_received(self, data, addr):
        # Handle each datagram concurrently; errors are logged inside handler().
        asyncio.ensure_future(self.handler(data, addr))

    async def respond(self):
        """Forever drain the queue and send replies; log-and-continue on errors."""
        while True:
            try:
                resp, caller = await self.data_queue.get()
                self.transport.sendto(resp, caller)
            except Exception as e:
                log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")

    async def handler(self, data, caller):
        """Run the callback on one datagram; a None result means 'no reply'."""
        try:
            data = await self.callback(data)
            if data is None:
                return
            await self.data_queue.put((data, caller))
        except Exception as e:
            log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")
class DNSServer:
    """Authoritative DNS responder that hands out reliable peers from the crawler DB.

    Periodically reloads the ``good_peers`` table and answers A/AAAA/ANY queries
    with a rotating window over the shuffled peer lists.
    """

    reliable_peers_v4: List[str]
    reliable_peers_v6: List[str]
    lock: asyncio.Lock  # guards the peer lists and rotation pointers
    pointer: int  # NOTE(review): unused annotation; the real state is pointer_v4/pointer_v6
    crawl_db: aiosqlite.Connection

    def __init__(self, config: Dict, root_path: Path):
        self.reliable_peers_v4 = []
        self.reliable_peers_v6 = []
        self.lock = asyncio.Lock()
        # Rotation offsets into the v4/v6 peer lists, so successive replies
        # cycle through different peers.
        self.pointer_v4 = 0
        self.pointer_v6 = 0
        crawler_db_path: str = config.get("crawler_db_path", "crawler.db")
        self.db_path = path_from_root(root_path, crawler_db_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)

    async def start(self):
        """Bind UDP port 53 and start the periodic good_peers refresh task."""
        # self.crawl_db = await aiosqlite.connect(self.db_path)
        # Get a reference to the event loop as we plan to use
        # low-level APIs.
        loop = asyncio.get_running_loop()
        # One protocol instance will be created to serve all
        # client requests.
        self.transport, self.protocol = await loop.create_datagram_endpoint(
            lambda: EchoServerProtocol(self.dns_response), local_addr=("0.0.0.0", 53)
        )
        self.reliable_task = asyncio.create_task(self.periodically_get_reliable_peers())

    async def periodically_get_reliable_peers(self):
        """Reload good_peers from the crawler DB forever, backing off from 1 to 15 minutes."""
        sleep_interval = 0
        while True:
            # Interval grows by one minute per pass, capped at 15 minutes.
            sleep_interval = min(15, sleep_interval + 1)
            await asyncio.sleep(sleep_interval * 60)
            try:
                # TODO: double check this. It shouldn't take this long to connect.
                crawl_db = await aiosqlite.connect(self.db_path, timeout=600)
                cursor = await crawl_db.execute(
                    "SELECT * from good_peers",
                )
                new_reliable_peers = []
                rows = await cursor.fetchall()
                await cursor.close()
                await crawl_db.close()
                for row in rows:
                    new_reliable_peers.append(row[0])
                if len(new_reliable_peers) > 0:
                    # Shuffle so rotation does not always start from the same peers.
                    random.shuffle(new_reliable_peers)
                async with self.lock:
                    self.reliable_peers_v4 = []
                    self.reliable_peers_v6 = []
                    for peer in new_reliable_peers:
                        # Classify each peer as IPv4, IPv6, or (silently) drop it.
                        ipv4 = True
                        try:
                            _ = ipaddress.IPv4Address(peer)
                        except ValueError:
                            ipv4 = False
                        if ipv4:
                            self.reliable_peers_v4.append(peer)
                        else:
                            try:
                                _ = ipaddress.IPv6Address(peer)
                            except ValueError:
                                continue
                            self.reliable_peers_v6.append(peer)
                    self.pointer_v4 = 0
                    self.pointer_v6 = 0
                log.error(
                    f"Number of reliable peers discovered in dns server:"
                    f" IPv4 count - {len(self.reliable_peers_v4)}"
                    f" IPv6 count - {len(self.reliable_peers_v6)}"
                )
            except Exception as e:
                log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")

    async def get_peers_to_respond(self, ipv4_count, ipv6_count):
        """Pick up to ipv4_count + ipv6_count peers, rotating through the lists.

        If a list is no larger than the requested count it is returned whole
        (NOTE(review): in that branch the internal list object itself may be
        returned - callers must not mutate it).
        """
        peers = []
        async with self.lock:
            # Append IPv4.
            size = len(self.reliable_peers_v4)
            if ipv4_count > 0 and size <= ipv4_count:
                peers = self.reliable_peers_v4
            elif ipv4_count > 0:
                # Wrap around the list starting at the rotation pointer.
                peers = [self.reliable_peers_v4[i % size] for i in range(self.pointer_v4, self.pointer_v4 + ipv4_count)]
                self.pointer_v4 = (self.pointer_v4 + ipv4_count) % size
            # Append IPv6.
            size = len(self.reliable_peers_v6)
            if ipv6_count > 0 and size <= ipv6_count:
                peers = peers + self.reliable_peers_v6
            elif ipv6_count > 0:
                peers = peers + [
                    self.reliable_peers_v6[i % size] for i in range(self.pointer_v6, self.pointer_v6 + ipv6_count)
                ]
                self.pointer_v6 = (self.pointer_v6 + ipv6_count) % size
            return peers

    async def dns_response(self, data):
        """Parse one DNS query datagram and return the packed reply bytes (or None).

        qtype 1 = A (32 IPv4 peers), 28 = AAAA (32 IPv6), 255 = ANY (16 + 16);
        anything else falls back to 32 IPv4 peers.
        """
        try:
            request = DNSRecord.parse(data)
            IPs = [MX(D.mail), soa_record] + ns_records
            ipv4_count = 0
            ipv6_count = 0
            if request.q.qtype == 1:
                ipv4_count = 32
            elif request.q.qtype == 28:
                ipv6_count = 32
            elif request.q.qtype == 255:
                ipv4_count = 16
                ipv6_count = 16
            else:
                ipv4_count = 32
            peers = await self.get_peers_to_respond(ipv4_count, ipv6_count)
            if len(peers) == 0:
                # No peers to advertise yet - drop the query (no reply sent).
                return None
            for peer in peers:
                ipv4 = True
                try:
                    _ = ipaddress.IPv4Address(peer)
                except ValueError:
                    ipv4 = False
                if ipv4:
                    IPs.append(A(peer))
                else:
                    try:
                        _ = ipaddress.IPv6Address(peer)
                    except ValueError:
                        continue
                    IPs.append(AAAA(peer))
            # NOTE(review): aa is normally a flag; passing len(IPs) here is inherited
            # upstream behavior (any non-zero value marks the answer authoritative).
            reply = DNSRecord(DNSHeader(id=request.header.id, qr=1, aa=len(IPs), ra=1), q=request.q)
            records = {
                D: IPs,
                D.ns1: [A(IP)],  # MX and NS records must never point to a CNAME alias (RFC 2181 section 10.3)
                D.ns2: [A(IP)],
                D.mail: [A(IP)],
                D.andrei: [CNAME(D)],
            }
            qname = request.q.qname
            qn = str(qname)
            qtype = request.q.qtype
            qt = QTYPE[qtype]
            # NOTE(review): str(qname) usually carries a trailing dot; this comparison
            # therefore relies on the configured domain_name including it - confirm.
            if qn == D or qn.endswith("." + D):
                for name, rrs in records.items():
                    if name == qn:
                        for rdata in rrs:
                            rqt = rdata.__class__.__name__
                            if qt in ["*", rqt] or (qt == "ANY" and (rqt == "A" or rqt == "AAAA")):
                                reply.add_answer(
                                    RR(rname=qname, rtype=getattr(QTYPE, rqt), rclass=1, ttl=TTL, rdata=rdata)
                                )
                # Authority/additional sections: NS + SOA for the zone.
                for rdata in ns_records:
                    reply.add_ar(RR(rname=D, rtype=QTYPE.NS, rclass=1, ttl=TTL, rdata=rdata))
                reply.add_auth(RR(rname=D, rtype=QTYPE.SOA, rclass=1, ttl=TTL, rdata=soa_record))
            return reply.pack()
        except Exception as e:
            log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")
async def serve_dns(config: Dict, root_path: Path):
    """Start the DNS seeder and keep the coroutine alive forever."""
    dns_server = DNSServer(config, root_path)
    await dns_server.start()
    # TODO: Make this cleaner?
    # The endpoint and refresh task run in the background; just keep the loop alive.
    while True:
        await asyncio.sleep(3600)
async def kill_processes():
    """Shutdown hook scheduled on SIGINT/SIGTERM; currently a no-op."""
    # TODO: implement.
    pass
def signal_received():
    """Signal callback: schedule the (currently no-op) shutdown coroutine."""
    asyncio.create_task(kill_processes())
async def async_main(config, root_path):
    """Install signal handlers where supported, then serve DNS forever."""
    loop = asyncio.get_running_loop()
    try:
        loop.add_signal_handler(signal.SIGINT, signal_received)
        loop.add_signal_handler(signal.SIGTERM, signal_received)
    except NotImplementedError:
        # add_signal_handler is unavailable on some platforms (e.g. Windows).
        log.info("signal handlers unsupported")
    await serve_dns(config, root_path)
def main():
    """Entry point: load config, populate the module-level zone globals
    (D, ns, TTL, SOA/NS records), then run the DNS server."""
    root_path = DEFAULT_ROOT_PATH
    config = load_config(root_path, "config.yaml", SERVICE_NAME)
    initialize_logging(SERVICE_NAME, config["logging"], root_path)
    global D
    global ns
    global TTL
    global soa_record
    global ns_records
    D = DomainName(config["domain_name"])
    ns = DomainName(config["nameserver"])
    TTL = config["ttl"]
    soa_record = SOA(
        mname=ns,  # primary name server
        rname=config["soa"]["rname"],  # email of the domain administrator
        times=(
            config["soa"]["serial_number"],
            config["soa"]["refresh"],
            config["soa"]["retry"],
            config["soa"]["expire"],
            config["soa"]["minimum"],
        ),
    )
    ns_records = [NS(ns)]
    asyncio.run(async_main(config=config, root_path=root_path))


if __name__ == "__main__":
    main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/peer_record.py | flax/seeder/peer_record.py | from __future__ import annotations
import math
import time
from dataclasses import dataclass
from flax.util.ints import uint32, uint64
from flax.util.streamable import Streamable, streamable
@streamable
@dataclass(frozen=True)
class PeerRecord(Streamable):
    """One crawled peer as persisted by the crawler.

    NOTE(review): this is a Streamable - field order presumably defines the
    serialized layout, so do not reorder fields.
    """

    peer_id: str
    ip_address: str
    port: uint32
    connected: bool
    last_try_timestamp: uint64  # last connection attempt
    try_count: uint32
    connected_timestamp: uint64  # last successful connection
    added_timestamp: uint64
    best_timestamp: uint64  # freshest timestamp gossiped for this peer
    version: str  # "undefined" until a handshake reports a real version
    handshake_time: uint64
    tls_version: str

    def update_version(self, version, now):
        """Record a handshake-reported version (ignoring "undefined").

        Uses object.__setattr__ deliberately to mutate this frozen dataclass.
        """
        if version != "undefined":
            object.__setattr__(self, "version", version)
            object.__setattr__(self, "handshake_time", uint64(now))
class PeerStat:
    """One exponentially-decayed reachability statistic with time constant tau."""

    weight: float
    count: float
    reliability: float

    def __init__(self, weight, count, reliability):
        self.weight = weight
        self.count = count
        self.reliability = reliability

    def update(self, is_reachable: bool, age: int, tau: int):
        """Decay the stat by exp(-age/tau) and fold in one new observation."""
        decay = math.exp(-age / tau)
        self.reliability = self.reliability * decay + (1.0 - decay if is_reachable else 0.0)
        self.count = self.count * decay + 1.0
        self.weight = self.weight * decay + 1.0 - decay


class PeerReliability:
    """Aggregated reachability history for a single peer across five time windows,
    used to decide whether it is reliable, temporarily ignored, or banned."""

    peer_id: str
    ignore_till: int
    ban_till: int
    stat_2h: PeerStat
    stat_8h: PeerStat
    stat_1d: PeerStat
    stat_1w: PeerStat
    stat_1m: PeerStat
    tries: int
    successes: int

    def __init__(
        self,
        peer_id: str,
        ignore_till: int = 0,
        ban_till: int = 0,
        stat_2h_weight: float = 0.0,
        stat_2h_count: float = 0.0,
        stat_2h_reliability: float = 0.0,
        stat_8h_weight: float = 0.0,
        stat_8h_count: float = 0.0,
        stat_8h_reliability: float = 0.0,
        stat_1d_weight: float = 0.0,
        stat_1d_count: float = 0.0,
        stat_1d_reliability: float = 0.0,
        stat_1w_weight: float = 0.0,
        stat_1w_count: float = 0.0,
        stat_1w_reliability: float = 0.0,
        stat_1m_weight: float = 0.0,
        stat_1m_count: float = 0.0,
        stat_1m_reliability: float = 0.0,
        tries: int = 0,
        successes: int = 0,
    ):
        self.peer_id = peer_id
        self.ignore_till = ignore_till
        self.ban_till = ban_till
        self.stat_2h = PeerStat(stat_2h_weight, stat_2h_count, stat_2h_reliability)
        self.stat_8h = PeerStat(stat_8h_weight, stat_8h_count, stat_8h_reliability)
        self.stat_1d = PeerStat(stat_1d_weight, stat_1d_count, stat_1d_reliability)
        self.stat_1w = PeerStat(stat_1w_weight, stat_1w_count, stat_1w_reliability)
        self.stat_1m = PeerStat(stat_1m_weight, stat_1m_count, stat_1m_reliability)
        self.tries = tries
        self.successes = successes

    def is_reliable(self) -> bool:
        """A peer is reliable when it is young-but-promising, or any window
        exceeds its (reliability, count) threshold pair."""
        if 0 < self.tries <= 3 and self.successes * 2 >= self.tries:
            return True
        windows = (
            (self.stat_2h, 0.85, 2),
            (self.stat_8h, 0.7, 4),
            (self.stat_1d, 0.55, 8),
            (self.stat_1w, 0.45, 16),
            (self.stat_1m, 0.35, 32),
        )
        return any(stat.reliability > rel and stat.count > cnt for stat, rel, cnt in windows)

    def get_ban_time(self) -> int:
        """Ban duration in seconds (0 for none), from the worst long-window stats."""
        if self.is_reliable():
            return 0
        for stat, limit, cnt, duration in (
            (self.stat_1m, 0.15, 32, 30 * 86400),
            (self.stat_1w, 0.10, 16, 7 * 86400),
            (self.stat_1d, 0.05, 8, 86400),
        ):
            # reliability - weight + 1.0 is the reliability credited for the
            # not-yet-observed portion of the window.
            if stat.reliability - stat.weight + 1.0 < limit and stat.count > cnt:
                return duration
        return 0

    def get_ignore_time(self) -> int:
        """Temporary-ignore duration in seconds (0 for none)."""
        if self.is_reliable():
            return 0
        for stat, limit, duration in (
            (self.stat_1m, 0.20, 10 * 86400),
            (self.stat_1w, 0.16, 3 * 86400),
            (self.stat_1d, 0.12, 8 * 3600),
            (self.stat_8h, 0.08, 2 * 3600),
        ):
            if stat.reliability - stat.weight + 1.0 < limit and stat.count > 2:
                return duration
        return 0

    def update(self, is_reachable: bool, age: int):
        """Fold one connection attempt into every window and extend the
        ignore/ban deadlines when warranted."""
        for stat, tau in (
            (self.stat_2h, 2 * 3600),
            (self.stat_8h, 8 * 3600),
            (self.stat_1d, 24 * 3600),
            (self.stat_1w, 7 * 24 * 3600),
            (self.stat_1m, 24 * 30 * 3600),
        ):
            stat.update(is_reachable, age, tau)
        self.tries += 1
        if is_reachable:
            self.successes += 1
        now = int(time.time())
        ignore_for = self.get_ignore_time()
        if ignore_for > 0 and (self.ignore_till == 0 or self.ignore_till < ignore_for + now):
            self.ignore_till = ignore_for + now
        ban_for = self.get_ban_time()
        if ban_for > 0 and (self.ban_till == 0 or self.ban_till < ban_for + now):
            self.ban_till = ban_for + now
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/__init__.py | flax/seeder/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/start_crawler.py | flax/seeder/start_crawler.py | from __future__ import annotations
import logging
import pathlib
import sys
from multiprocessing import freeze_support
from typing import Dict, Optional
from flax.consensus.constants import ConsensusConstants
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.rpc.crawler_rpc_api import CrawlerRpcApi
from flax.seeder.crawler import Crawler
from flax.seeder.crawler_api import CrawlerAPI
from flax.server.outbound_message import NodeType
from flax.server.start_service import RpcInfo, Service, async_run
from flax.util.flax_logging import initialize_service_logging
from flax.util.config import load_config, load_config_cli
from flax.util.default_root import DEFAULT_ROOT_PATH
# See: https://bugs.python.org/issue29288
# Force the idna codec to load at import time (works around the CPython issue above,
# where codec lookup can fail when first triggered from a non-main thread).
"".encode("idna")

SERVICE_NAME = "seeder"
log = logging.getLogger(__name__)
def create_full_node_crawler_service(
    root_path: pathlib.Path,
    config: Dict,
    consensus_constants: ConsensusConstants,
    connect_to_daemon: bool = True,
) -> Service[Crawler]:
    """Assemble the crawler node, its API wrapper, and the Service that runs it.

    The crawler masquerades as a full node (NodeType.FULL_NODE, service name
    "full_node") so regular peers will talk to it.
    """
    svc_config = config[SERVICE_NAME]
    node = Crawler(
        svc_config,
        root_path=root_path,
        consensus_constants=consensus_constants,
    )
    peer_api = CrawlerAPI(node)

    rpc_info: Optional[RpcInfo] = None
    if svc_config.get("start_rpc_server", True):
        rpc_info = (CrawlerRpcApi, svc_config.get("rpc_port", 8561))

    listen_port = svc_config["port"]
    return Service(
        root_path=root_path,
        config=config,
        node=peer_api.crawler,
        peer_api=peer_api,
        node_type=NodeType.FULL_NODE,
        advertised_port=listen_port,
        service_name="full_node",
        upnp_ports=[],
        server_listen_ports=[listen_port],
        on_connect_callback=node.on_connect,
        network_id=svc_config["selected_network"],
        rpc_info=rpc_info,
        connect_to_daemon=connect_to_daemon,
    )
async def async_main() -> int:
    """Load configuration, build the crawler service, and run it to completion."""
    # TODO: refactor to avoid the double load
    full_config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
    seeder_config = load_config_cli(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME)
    full_config[SERVICE_NAME] = seeder_config
    network_overrides = seeder_config["network_overrides"]["constants"][seeder_config["selected_network"]]
    constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**network_overrides)
    initialize_service_logging(service_name=SERVICE_NAME, config=full_config)
    service = create_full_node_crawler_service(DEFAULT_ROOT_PATH, full_config, constants)
    await service.setup_process_global_state()
    await service.run()
    return 0
def main() -> int:
    """Process entry point; returns the exit code from async_main()."""
    # Required for frozen (e.g. PyInstaller) builds that use multiprocessing.
    freeze_support()
    return async_run(async_main())


if __name__ == "__main__":
    sys.exit(main())
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/seeder/crawler.py | flax/seeder/crawler.py | import asyncio
import logging
import time
import traceback
import ipaddress
from collections import defaultdict
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
import aiosqlite
import flax.server.ws_connection as ws
from flax.consensus.constants import ConsensusConstants
from flax.full_node.coin_store import CoinStore
from flax.protocols import full_node_protocol
from flax.rpc.rpc_server import default_get_connections
from flax.seeder.crawl_store import CrawlStore
from flax.seeder.peer_record import PeerRecord, PeerReliability
from flax.server.outbound_message import NodeType
from flax.server.server import FlaxServer
from flax.types.peer_info import PeerInfo
from flax.util.path import path_from_root
from flax.util.ints import uint32, uint64
log = logging.getLogger(__name__)
class Crawler:
    """Network crawler: repeatedly connects to known peers, asks them for more
    peers, records reachability/version stats in a CrawlStore-backed DB, and
    logs batch summaries. Poses as a full node so peers will handshake."""

    sync_store: Any
    coin_store: CoinStore
    connection: aiosqlite.Connection
    config: Dict
    _server: Optional[FlaxServer]
    crawl_store: Optional[CrawlStore]
    log: logging.Logger
    constants: ConsensusConstants
    _shut_down: bool
    root_path: Path
    peer_count: int
    with_peak: set  # PeerInfo of peers that reported a peak at/above minimum_height
    minimum_version_count: int

    @property
    def server(self) -> FlaxServer:
        # This is a stop gap until the class usage is refactored such the values of
        # integral attributes are known at creation of the instance.
        if self._server is None:
            raise RuntimeError("server not assigned")
        return self._server

    def __init__(
        self,
        config: Dict,
        root_path: Path,
        consensus_constants: ConsensusConstants,
        name: Optional[str] = None,
    ):
        self.initialized = False
        self.root_path = root_path
        self.config = config
        self._server = None
        self._shut_down = False  # Set to true to close all infinite loops
        self.constants = consensus_constants
        self.state_changed_callback: Optional[Callable] = None
        self.crawl_store = None
        self.log = log
        self.peer_count = 0
        self.with_peak = set()
        # Per-batch scratch state, drained at the end of each crawl iteration.
        self.peers_retrieved: List[Any] = []
        self.host_to_version: Dict[str, str] = {}
        self.version_cache: List[Tuple[str, str]] = []
        self.handshake_time: Dict[str, int] = {}
        self.best_timestamp_per_peer: Dict[str, int] = {}
        crawler_db_path: str = config.get("crawler_db_path", "crawler.db")
        self.db_path = path_from_root(root_path, crawler_db_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.bootstrap_peers = config["bootstrap_peers"]
        self.minimum_height = config["minimum_height"]
        self.other_peers_port = config["other_peers_port"]
        self.versions: Dict[str, int] = defaultdict(lambda: 0)
        self.minimum_version_count = self.config.get("minimum_version_count", 100)
        if self.minimum_version_count < 1:
            self.log.warning(
                f"Crawler configuration minimum_version_count expected to be greater than zero: "
                f"{self.minimum_version_count!r}"
            )

    def _set_state_changed_callback(self, callback: Callable):
        self.state_changed_callback = callback

    def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
        return default_get_connections(server=self.server, request_node_type=request_node_type)

    async def create_client(self, peer_info, on_connect):
        return await self.server.start_client(peer_info, on_connect)

    async def connect_task(self, peer):
        """Try connecting to one peer; on success, harvest its version and peer
        list, wait briefly for a peak report, then record the outcome."""

        async def peer_action(peer: ws.WSFlaxConnection):
            peer_info = peer.get_peer_info()
            version = peer.get_version()
            if peer_info is not None and version is not None:
                self.version_cache.append((peer_info.host, version))
            # Ask peer for peers
            response = await peer.request_peers(full_node_protocol.RequestPeers(), timeout=3)
            # Add peers to DB
            if isinstance(response, full_node_protocol.RespondPeers):
                self.peers_retrieved.append(response)
            peer_info = peer.get_peer_info()
            tries = 0
            got_peak = False
            # Poll up to 25 * 0.1s for new_peak() to register this peer.
            while tries < 25:
                tries += 1
                if peer_info is None:
                    break
                if peer_info in self.with_peak:
                    got_peak = True
                    break
                await asyncio.sleep(0.1)
            if not got_peak and peer_info is not None and self.crawl_store is not None:
                # Connected but no acceptable peak: record as not fully reachable.
                await self.crawl_store.peer_connected_hostname(peer_info.host, False)
            await peer.close()

        try:
            connected = await self.create_client(PeerInfo(peer.ip_address, peer.port), peer_action)
            if not connected:
                await self.crawl_store.peer_failed_to_connect(peer)
        except Exception as e:
            self.log.info(f"Exception: {e}. Traceback: {traceback.format_exc()}.")
            await self.crawl_store.peer_failed_to_connect(peer)

    async def _start(self):
        # We override the default peer_connect_timeout when running from the crawler
        crawler_peer_timeout = self.config.get("peer_connect_timeout", 2)
        self.server.config["peer_connect_timeout"] = crawler_peer_timeout
        self.task = asyncio.create_task(self.crawl())

    async def crawl(self):
        """Main loop: seed bootstrap peers, then repeatedly crawl a batch,
        merge gossip/version results, persist them, and log statistics."""
        # Ensure the state_changed callback is set up before moving on
        # Sometimes, the daemon connection + state changed callback isn't up and ready
        # by the time we get to the first _state_changed call, so this just ensures it's there before moving on
        while self.state_changed_callback is None:
            self.log.info("Waiting for state changed callback...")
            await asyncio.sleep(0.1)
        try:
            self.connection = await aiosqlite.connect(self.db_path)
            self.crawl_store = await CrawlStore.create(self.connection)
            self.log.info("Started")
            t_start = time.time()
            total_nodes = 0
            self.seen_nodes = set()
            tried_nodes = set()
            # Seed the store with the configured bootstrap peers.
            for peer in self.bootstrap_peers:
                new_peer = PeerRecord(
                    peer,
                    peer,
                    self.other_peers_port,
                    False,
                    0,
                    0,
                    0,
                    uint64(int(time.time())),
                    uint64(0),
                    "undefined",
                    uint64(0),
                    tls_version="unknown",
                )
                new_peer_reliability = PeerReliability(peer)
                self.crawl_store.maybe_add_peer(new_peer, new_peer_reliability)
            self.host_to_version, self.handshake_time = self.crawl_store.load_host_to_version()
            self.best_timestamp_per_peer = self.crawl_store.load_best_peer_reliability()
            self.versions = defaultdict(lambda: 0)
            for host, version in self.host_to_version.items():
                self.versions[version] += 1
            self._state_changed("loaded_initial_peers")
            while True:
                self.with_peak = set()
                peers_to_crawl = await self.crawl_store.get_peers_to_crawl(25000, 250000)
                tasks = set()
                for peer in peers_to_crawl:
                    if peer.port == self.other_peers_port:
                        total_nodes += 1
                        if peer.ip_address not in tried_nodes:
                            tried_nodes.add(peer.ip_address)
                        task = asyncio.create_task(self.connect_task(peer))
                        tasks.add(task)
                        # Cap concurrency at ~250 in-flight connection attempts.
                        if len(tasks) >= 250:
                            await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
                            tasks = set(filter(lambda t: not t.done(), tasks))
                if len(tasks) > 0:
                    await asyncio.wait(tasks, timeout=30)
                # Merge gossiped peer lists gathered by connect_task().
                for response in self.peers_retrieved:
                    for response_peer in response.peer_list:
                        if response_peer.host not in self.best_timestamp_per_peer:
                            self.best_timestamp_per_peer[response_peer.host] = response_peer.timestamp
                        self.best_timestamp_per_peer[response_peer.host] = max(
                            self.best_timestamp_per_peer[response_peer.host], response_peer.timestamp
                        )
                        if (
                            response_peer.host not in self.seen_nodes
                            and response_peer.timestamp > time.time() - 5 * 24 * 3600
                        ):
                            self.seen_nodes.add(response_peer.host)
                            new_peer = PeerRecord(
                                response_peer.host,
                                response_peer.host,
                                uint32(response_peer.port),
                                False,
                                uint64(0),
                                uint32(0),
                                uint64(0),
                                uint64(int(time.time())),
                                uint64(response_peer.timestamp),
                                "undefined",
                                uint64(0),
                                tls_version="unknown",
                            )
                            new_peer_reliability = PeerReliability(response_peer.host)
                            if self.crawl_store is not None:
                                self.crawl_store.maybe_add_peer(new_peer, new_peer_reliability)
                        await self.crawl_store.update_best_timestamp(
                            response_peer.host,
                            self.best_timestamp_per_peer[response_peer.host],
                        )
                # Record versions observed during this batch's handshakes.
                for host, version in self.version_cache:
                    self.handshake_time[host] = int(time.time())
                    self.host_to_version[host] = version
                    await self.crawl_store.update_version(host, version, int(time.time()))

                # Expire hosts not handshaken within the last 5 days.
                to_remove = set()
                now = int(time.time())
                for host in self.host_to_version.keys():
                    active = True
                    if host not in self.handshake_time:
                        active = False
                    elif self.handshake_time[host] < now - 5 * 24 * 3600:
                        active = False
                    if not active:
                        to_remove.add(host)
                self.host_to_version = {
                    host: version for host, version in self.host_to_version.items() if host not in to_remove
                }
                self.best_timestamp_per_peer = {
                    host: timestamp
                    for host, timestamp in self.best_timestamp_per_peer.items()
                    if timestamp >= now - 5 * 24 * 3600
                }
                self.versions = defaultdict(lambda: 0)
                for host, version in self.host_to_version.items():
                    self.versions[version] += 1
                # Reset per-batch state before the next round.
                self.version_cache = []
                self.peers_retrieved = []
                self.server.banned_peers = {}
                if len(peers_to_crawl) == 0:
                    continue

                # Retry DB writes in case a lock causes a timeout.
                # NOTE(review): range(1, 5) yields at most 4 attempts, not 5.
                for i in range(1, 5):
                    try:
                        await self.crawl_store.load_to_db()
                        await self.crawl_store.load_reliable_peers_to_db()
                    except Exception as e:
                        self.log.error(f"Exception while saving to DB: {e}.")
                        self.log.error("Waiting 5 seconds before retry...")
                        await asyncio.sleep(5)
                        continue
                    break
                # Batch summary. log.error is used so the stats always appear
                # regardless of the configured log level.
                total_records = self.crawl_store.get_total_records()
                ipv6_count = self.crawl_store.get_ipv6_peers()
                self.log.error("***")
                self.log.error("Finished batch:")
                self.log.error(f"Total IPs stored in DB: {total_records}.")
                self.log.error(f"Total IPV6 addresses stored in DB: {ipv6_count}")
                self.log.error(f"Total connections attempted since crawler started: {total_nodes}.")
                self.log.error(f"Total unique nodes attempted since crawler started: {len(tried_nodes)}.")
                t_now = time.time()
                t_delta = int(t_now - t_start)
                if t_delta > 0:
                    self.log.error(f"Avg connections per second: {total_nodes // t_delta}.")
                # Periodically print detailed stats.
                reliable_peers = self.crawl_store.get_reliable_peers()
                self.log.error(f"High quality reachable nodes, used by DNS introducer in replies: {reliable_peers}")
                banned_peers = self.crawl_store.get_banned_peers()
                ignored_peers = self.crawl_store.get_ignored_peers()
                available_peers = len(self.host_to_version)
                addresses_count = len(self.best_timestamp_per_peer)
                total_records = self.crawl_store.get_total_records()
                ipv6_addresses_count = 0
                for host in self.best_timestamp_per_peer.keys():
                    try:
                        ipaddress.IPv6Address(host)
                        ipv6_addresses_count += 1
                    except ipaddress.AddressValueError:
                        continue
                self.log.error(
                    "IPv4 addresses gossiped with timestamp in the last 5 days with respond_peers messages: "
                    f"{addresses_count - ipv6_addresses_count}."
                )
                self.log.error(
                    "IPv6 addresses gossiped with timestamp in the last 5 days with respond_peers messages: "
                    f"{ipv6_addresses_count}."
                )
                ipv6_available_peers = 0
                for host in self.host_to_version.keys():
                    try:
                        ipaddress.IPv6Address(host)
                        ipv6_available_peers += 1
                    except ipaddress.AddressValueError:
                        continue
                self.log.error(
                    f"Total IPv4 nodes reachable in the last 5 days: {available_peers - ipv6_available_peers}."
                )
                self.log.error(f"Total IPv6 nodes reachable in the last 5 days: {ipv6_available_peers}.")
                self.log.error("Version distribution among reachable in the last 5 days (at least 100 nodes):")
                for version, count in sorted(self.versions.items(), key=lambda kv: kv[1], reverse=True):
                    if count >= self.minimum_version_count:
                        self.log.error(f"Version: {version} - Count: {count}")
                self.log.error(f"Banned addresses in the DB: {banned_peers}")
                self.log.error(f"Temporary ignored addresses in the DB: {ignored_peers}")
                self.log.error(
                    "Peers to crawl from in the next batch (total IPs - ignored - banned): "
                    f"{total_records - banned_peers - ignored_peers}"
                )
                self.log.error("***")
                self._state_changed("crawl_batch_completed")
        except Exception as e:
            self.log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")

    def set_server(self, server: FlaxServer):
        self._server = server

    def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None):
        if self.state_changed_callback is not None:
            self.state_changed_callback(change, change_data)

    async def new_peak(self, request: full_node_protocol.NewPeak, peer: ws.WSFlaxConnection):
        """Peer reported a new peak: mark it reachable (if tall enough) and
        register it in with_peak so connect_task() sees it."""
        try:
            peer_info = peer.get_peer_info()
            tls_version = peer.get_tls_version()
            if tls_version is None:
                tls_version = "unknown"
            if peer_info is None:
                return
            if request.height >= self.minimum_height:
                if self.crawl_store is not None:
                    await self.crawl_store.peer_connected_hostname(peer_info.host, True, tls_version)
            self.with_peak.add(peer_info)
        except Exception as e:
            self.log.error(f"Exception: {e}. Traceback: {traceback.format_exc()}.")

    async def on_connect(self, connection: ws.WSFlaxConnection):
        pass

    def _close(self):
        self._shut_down = True

    async def _await_closed(self):
        await self.connection.close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/lineage_proof.py | flax/wallet/lineage_proof.py | from dataclasses import dataclass
from typing import Optional, Any, List
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.program import Program
from flax.util.ints import uint64
from flax.util.streamable import Streamable, streamable
@streamable
@dataclass(frozen=True)
class LineageProof(Streamable):
    """Optional (parent coin name, inner puzzle hash, amount) triple used to
    prove a coin's lineage; any subset of the fields may be unset."""

    parent_name: Optional[bytes32] = None
    inner_puzzle_hash: Optional[bytes32] = None
    amount: Optional[uint64] = None

    def to_program(self) -> Program:
        """Serialize as a CLVM list containing only the fields that are set,
        in declaration order."""
        present: List[Any] = [
            value
            for value in (self.parent_name, self.inner_puzzle_hash, self.amount)
            if value is not None
        ]
        return Program.to(present)

    def is_none(self) -> bool:
        """True when no component of the proof is set."""
        return self.parent_name is None and self.inner_puzzle_hash is None and self.amount is None
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/notification_manager.py | flax/wallet/notification_manager.py | from __future__ import annotations
import dataclasses
import logging
from typing import Any, Dict, List, Optional, Set
from blspy import G2Element
from flax.protocols.wallet_protocol import CoinState
from flax.types.announcement import Announcement
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_spend import CoinSpend
from flax.types.spend_bundle import SpendBundle
from flax.util.db_wrapper import DBWrapper2
from flax.util.ints import uint64
from flax.wallet.notification_store import Notification, NotificationStore
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.compute_memos import compute_memos_for_spend
from flax.wallet.util.notifications import construct_notification
class NotificationManager:
    """Tracks on-chain notification coins for the wallet.

    Incoming notification coins are validated against the notification puzzle
    and persisted in the NotificationStore; outgoing notifications are built
    as a one-coin spend bundle merged into a standard wallet transaction.
    """

    # WalletStateManager; typed Any to avoid a circular import.
    wallet_state_manager: Any
    log: logging.Logger
    notification_store: NotificationStore

    @staticmethod
    async def create(
        wallet_state_manager: Any,
        db_wrapper: DBWrapper2,
        name: Optional[str] = None,
    ) -> NotificationManager:
        """Async factory: wires up the store; `name` overrides the logger name."""
        self = NotificationManager()
        if name:
            self.log = logging.getLogger(name)
        else:
            self.log = logging.getLogger(__name__)
        self.wallet_state_manager = wallet_state_manager
        self.notification_store = await NotificationStore.create(db_wrapper)
        return self

    async def potentially_add_new_notification(self, coin_state: CoinState, parent_spend: CoinSpend) -> bool:
        """Record `coin_state` as a notification if it passes the config gates.

        Returns False when the coin is unspent, notifications are disabled, or
        the amount is below `required_notification_amount`; otherwise True.
        """
        if (
            coin_state.spent_height is None
            or not self.wallet_state_manager.wallet_node.config.get("accept_notifications", False)
            or self.wallet_state_manager.wallet_node.config.get("required_notification_amount", 0)
            > coin_state.coin.amount
        ):
            return False
        else:
            coin_name: bytes32 = coin_state.coin.name()
            memos: Dict[bytes32, List[bytes]] = compute_memos_for_spend(parent_spend)
            coin_memos: List[bytes] = memos.get(coin_name, [])
            # Expect exactly [target_puzzle_hash, message]; rebuild the
            # notification puzzle and require it to match the coin's puzzle hash
            # before persisting.
            if (
                len(coin_memos) == 2
                and construct_notification(bytes32(coin_memos[0]), uint64(coin_state.coin.amount)).get_tree_hash()
                == coin_state.coin.puzzle_hash
            ):
                await self.notification_store.add_notification(
                    Notification(
                        coin_state.coin.name(),
                        coin_memos[1],
                        uint64(coin_state.coin.amount),
                    )
                )
            # NOTE(review): True is returned whenever the config gates passed,
            # even if the memo/puzzle check failed and nothing was stored —
            # confirm this is intended.
            return True

    async def send_new_notification(
        self, target: bytes32, msg: bytes, amount: uint64, fee: uint64 = uint64(0)
    ) -> TransactionRecord:
        """Build a signed transaction that delivers `msg` to `target` via a notification coin.

        The notification coin is spent immediately in the same bundle; its
        announcement is consumed by the funding transaction to bind the two.
        """
        coins: Set[Coin] = await self.wallet_state_manager.main_wallet.select_coins(uint64(amount + fee))
        origin_coin: bytes32 = next(iter(coins)).name()
        notification_puzzle: Program = construct_notification(target, amount)
        notification_hash: bytes32 = notification_puzzle.get_tree_hash()
        notification_coin: Coin = Coin(origin_coin, notification_hash, amount)
        # The notification puzzle needs no solution and no signature.
        notification_spend = CoinSpend(
            notification_coin,
            notification_puzzle,
            Program.to(None),
        )
        extra_spend_bundle = SpendBundle([notification_spend], G2Element())
        flax_tx = await self.wallet_state_manager.main_wallet.generate_signed_transaction(
            amount,
            notification_hash,
            fee,
            coins=coins,
            origin_id=origin_coin,
            coin_announcements_to_consume={Announcement(notification_coin.name(), b"")},
            memos=[target, msg],
        )
        full_tx: TransactionRecord = dataclasses.replace(
            flax_tx, spend_bundle=SpendBundle.aggregate([flax_tx.spend_bundle, extra_spend_bundle])
        )
        return full_tx
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_user_store.py | flax/wallet/wallet_user_store.py | from __future__ import annotations
from typing import List, Optional
from flax.util.db_wrapper import DBWrapper2, execute_fetchone
from flax.util.ints import uint32
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_info import WalletInfo
class WalletUserStore:
    """
    WalletUserStore keeps track of all user created wallets and necessary smart-contract data
    """

    cache_size: uint32
    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls, db_wrapper: DBWrapper2):
        """Async factory: create the table/indexes and seed the default wallet."""
        self = cls()
        self.db_wrapper = db_wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                (
                    "CREATE TABLE IF NOT EXISTS users_wallets("
                    "id INTEGER PRIMARY KEY AUTOINCREMENT,"
                    " name text,"
                    " wallet_type int,"
                    " data text)"
                )
            )
            await conn.execute("CREATE INDEX IF NOT EXISTS name on users_wallets(name)")
            await conn.execute("CREATE INDEX IF NOT EXISTS type on users_wallets(wallet_type)")
            await conn.execute("CREATE INDEX IF NOT EXISTS data on users_wallets(data)")
        await self.init_wallet()
        return self

    async def init_wallet(self):
        # Seed the store with the standard wallet on first run only.
        all_wallets = await self.get_all_wallet_info_entries()
        if len(all_wallets) == 0:
            await self.create_wallet("Flax Wallet", WalletType.STANDARD_WALLET, "")

    async def create_wallet(
        self,
        name: str,
        wallet_type: int,
        data: str,
        id: Optional[int] = None,
    ) -> WalletInfo:
        """Insert a wallet row (id=None lets SQLite assign one) and return the stored row."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT INTO users_wallets VALUES(?, ?, ?, ?)",
                (id, name, wallet_type, data),
            )
            await cursor.close()
        wallet = await self.get_last_wallet()
        if wallet is None:
            raise ValueError("Failed to get the just-created wallet")
        return wallet

    async def delete_wallet(self, id: int):
        """Delete the wallet row with the given id (no-op if absent)."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (await conn.execute("DELETE FROM users_wallets where id=?", (id,))).close()

    async def update_wallet(self, wallet_info: WalletInfo):
        """Upsert the given wallet row (INSERT OR REPLACE keyed on id)."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT or REPLACE INTO users_wallets VALUES(?, ?, ?, ?)",
                (
                    wallet_info.id,
                    wallet_info.name,
                    wallet_info.type,
                    wallet_info.data,
                ),
            )
            await cursor.close()

    async def get_last_wallet(self) -> Optional[WalletInfo]:
        # MAX(id) is the newest row since id is AUTOINCREMENT.
        async with self.db_wrapper.reader_no_transaction() as conn:
            row = await execute_fetchone(conn, "SELECT MAX(id) FROM users_wallets")
        return None if row is None else await self.get_wallet_by_id(row[0])

    async def get_all_wallet_info_entries(self, wallet_type: Optional[WalletType] = None) -> List[WalletInfo]:
        """
        Return a set containing all wallets, optionally with a specific WalletType
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            if wallet_type is None:
                rows = await conn.execute_fetchall("SELECT * from users_wallets")
            else:
                rows = await conn.execute_fetchall(
                    "SELECT * from users_wallets WHERE wallet_type=?", (wallet_type.value,)
                )
            return [WalletInfo(row[0], row[1], row[2], row[3]) for row in rows]

    async def get_wallet_by_id(self, id: int) -> Optional[WalletInfo]:
        """
        Return a wallet by id
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            row = await execute_fetchone(conn, "SELECT * from users_wallets WHERE id=?", (id,))
        return None if row is None else WalletInfo(row[0], row[1], row[2], row[3])
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_node.py | flax/wallet/wallet_node.py | import asyncio
import dataclasses
import logging
import multiprocessing
import random
import sys
import time
import traceback
from pathlib import Path
from typing import Any, Callable, Dict, Iterator, List, Optional, Set, Tuple
from blspy import AugSchemeMPL, PrivateKey, G2Element, G1Element
from packaging.version import Version
from flax.consensus.block_record import BlockRecord
from flax.consensus.blockchain import ReceiveBlockResult
from flax.consensus.constants import ConsensusConstants
from flax.daemon.keychain_proxy import (
KeychainProxy,
connect_to_keychain_and_validate,
wrap_local_keychain,
)
from flax.protocols import wallet_protocol
from flax.protocols.full_node_protocol import RequestProofOfWeight, RespondProofOfWeight
from flax.protocols.protocol_message_types import ProtocolMessageTypes
from flax.protocols.wallet_protocol import (
CoinState,
RespondBlockHeader,
RespondToCoinUpdates,
RespondToPhUpdates,
)
from flax.rpc.rpc_server import default_get_connections
from flax.server.node_discovery import WalletPeers
from flax.server.outbound_message import Message, NodeType, make_msg
from flax.server.peer_store_resolver import PeerStoreResolver
from flax.server.server import FlaxServer
from flax.server.ws_connection import WSFlaxConnection
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from flax.types.coin_spend import CoinSpend
from flax.types.header_block import HeaderBlock
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.types.peer_info import PeerInfo
from flax.types.weight_proof import WeightProof
from flax.util.chunks import chunks
from flax.util.config import WALLET_PEERS_PATH_KEY_DEPRECATED, process_config_start_method
from flax.util.errors import KeychainIsLocked, KeychainProxyConnectionFailure, KeychainIsEmpty, KeychainKeyNotFound
from flax.util.ints import uint32, uint64
from flax.util.keychain import Keychain
from flax.util.path import path_from_root
from flax.util.profiler import profile_task
from flax.util.memory_profiler import mem_profile_task
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.new_peak_queue import NewPeakItem, NewPeakQueue, NewPeakQueueTypes
from flax.wallet.util.peer_request_cache import PeerRequestCache, can_use_peer_request_cache
from flax.wallet.util.wallet_sync_utils import (
fetch_header_blocks_in_range,
fetch_last_tx_from_peer,
last_change_height_cs,
PeerRequestException,
request_and_validate_additions,
request_and_validate_removals,
request_header_blocks,
subscribe_to_coin_updates,
subscribe_to_phs,
)
from flax.wallet.wallet_state_manager import WalletStateManager
from flax.wallet.wallet_weight_proof_handler import get_wp_fork_point, WalletWeightProofHandler
def get_wallet_db_path(root_path: Path, config: Dict[str, Any], key_fingerprint: str) -> Path:
    """
    Construct a path to the wallet db. Uses config values and the wallet key's fingerprint to
    determine the wallet db filename.
    """
    template: str = config["database_path"]
    db_name = template.replace("CHALLENGE", config["selected_network"]).replace("KEY", key_fingerprint)
    # "v2_r1" is the current wallet db version identifier
    if "v2_r1" not in db_name:
        db_name = db_name.replace("v2", "v2_r1").replace("v1", "v2_r1")
    return path_from_root(root_path, db_name)
@dataclasses.dataclass
class WalletNode:
    """Light wallet node: syncs wallet state from (trusted or untrusted) full nodes."""

    config: Dict
    root_path: Path
    constants: ConsensusConstants
    local_keychain: Optional[Keychain] = None
    log: logging.Logger = logging.getLogger(__name__)

    # Normal operation data
    cached_blocks: Dict = dataclasses.field(default_factory=dict)
    future_block_hashes: Dict = dataclasses.field(default_factory=dict)

    # Sync data
    proof_hashes: List = dataclasses.field(default_factory=list)
    state_changed_callback: Optional[Callable] = None
    # Backing fields for the guarded properties below; None until _start assigns them.
    _wallet_state_manager: Optional[WalletStateManager] = None
    _weight_proof_handler: Optional[WalletWeightProofHandler] = None
    _server: Optional[FlaxServer] = None
    wsm_close_task: Optional[asyncio.Task] = None
    sync_task: Optional[asyncio.Task] = None
    logged_in_fingerprint: Optional[int] = None
    peer_task: Optional[asyncio.Task] = None
    logged_in: bool = False
    _keychain_proxy: Optional[KeychainProxy] = None
    # Cache mapping block height -> block timestamp.
    height_to_time: Dict[uint32, uint64] = dataclasses.field(default_factory=dict)
    # Peers that we have long synced to
    synced_peers: Set[bytes32] = dataclasses.field(default_factory=set)
    wallet_peers: Optional[WalletPeers] = None
    wallet_peers_initialized: bool = False
    valid_wp_cache: Dict[bytes32, Any] = dataclasses.field(default_factory=dict)
    # Per-peer request caches keyed by peer node id; dropped on disconnect.
    untrusted_caches: Dict[bytes32, PeerRequestCache] = dataclasses.field(default_factory=dict)
    # in Untrusted mode wallet might get the state update before receiving the block
    race_cache: Dict[bytes32, Set[CoinState]] = dataclasses.field(default_factory=dict)
    race_cache_hashes: List[Tuple[uint32, bytes32]] = dataclasses.field(default_factory=list)
    node_peaks: Dict[bytes32, Tuple[uint32, bytes32]] = dataclasses.field(default_factory=dict)
    validation_semaphore: Optional[asyncio.Semaphore] = None
    local_node_synced: bool = False
    LONG_SYNC_THRESHOLD: int = 300
    last_wallet_tx_resend_time: int = 0
    # Duration in seconds
    wallet_tx_resend_timeout_secs: int = 1800
    _new_peak_queue: Optional[NewPeakQueue] = None
    full_node_peer: Optional[PeerInfo] = None
    _shut_down: bool = False
    # Background tasks; cancelled by _close().
    _process_new_subscriptions_task: Optional[asyncio.Task] = None
    _retry_failed_states_task: Optional[asyncio.Task] = None
    _primary_peer_sync_task: Optional[asyncio.Task] = None
    _secondary_peer_sync_task: Optional[asyncio.Task] = None
@property
def keychain_proxy(self) -> KeychainProxy:
    """The keychain proxy; raises RuntimeError until one has been assigned."""
    # This is a stop gap until the class usage is refactored such the values of
    # integral attributes are known at creation of the instance.
    proxy = self._keychain_proxy
    if proxy is None:
        raise RuntimeError("keychain proxy not assigned")
    return proxy
@property
def wallet_state_manager(self) -> WalletStateManager:
    """The wallet state manager; raises RuntimeError until one has been assigned."""
    # This is a stop gap until the class usage is refactored such the values of
    # integral attributes are known at creation of the instance.
    manager = self._wallet_state_manager
    if manager is None:
        raise RuntimeError("wallet state manager not assigned")
    return manager
@property
def server(self) -> FlaxServer:
    """The network server; raises RuntimeError until one has been assigned."""
    # This is a stop gap until the class usage is refactored such the values of
    # integral attributes are known at creation of the instance.
    srv = self._server
    if srv is None:
        raise RuntimeError("server not assigned")
    return srv
@property
def new_peak_queue(self) -> NewPeakQueue:
    """The new-peak priority queue; raises RuntimeError until one has been assigned."""
    # This is a stop gap until the class usage is refactored such the values of
    # integral attributes are known at creation of the instance.
    queue = self._new_peak_queue
    if queue is None:
        raise RuntimeError("new peak queue not assigned")
    return queue
def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
    """Describe the current connections, optionally filtered by node type."""
    current_server = self.server
    return default_get_connections(server=current_server, request_node_type=request_node_type)
async def ensure_keychain_proxy(self) -> KeychainProxy:
    """Return the keychain proxy, creating and caching one on first use."""
    if self._keychain_proxy is not None:
        return self._keychain_proxy
    if self.local_keychain:
        # Wrap the in-process keychain rather than talking to the daemon.
        self._keychain_proxy = wrap_local_keychain(self.local_keychain, log=self.log)
    else:
        self._keychain_proxy = await connect_to_keychain_and_validate(self.root_path, self.log)
        if not self._keychain_proxy:
            raise KeychainProxyConnectionFailure()
    return self._keychain_proxy
def get_cache_for_peer(self, peer) -> PeerRequestCache:
    """Return the request cache for this peer, creating one on first use."""
    cache = self.untrusted_caches.get(peer.peer_node_id)
    if cache is None:
        cache = PeerRequestCache()
        self.untrusted_caches[peer.peer_node_id] = cache
    return cache
def rollback_request_caches(self, reorg_height: int):
    """Drop cached data above `reorg_height` from every per-peer cache."""
    # Everything after reorg_height should be removed from the cache
    for peer_cache in self.untrusted_caches.values():
        peer_cache.clear_after_height(reorg_height)
async def get_key_for_fingerprint(self, fingerprint: Optional[int]) -> Optional[PrivateKey]:
    """Look up a private key via the keychain proxy.

    Returns None (after logging a warning) when the keychain is empty or
    locked, or the fingerprint is unknown. Re-raises when the keychain proxy
    itself cannot be reached, since that is not recoverable here.
    """
    try:
        keychain_proxy = await self.ensure_keychain_proxy()
        # Returns first private key if fingerprint is None
        key = await keychain_proxy.get_key_for_fingerprint(fingerprint)
    except KeychainIsEmpty:
        self.log.warning("No keys present. Create keys with the UI, or with the 'flax keys' program.")
        return None
    except KeychainKeyNotFound:
        self.log.warning(f"Key not found for fingerprint {fingerprint}")
        return None
    except KeychainIsLocked:
        self.log.warning("Keyring is locked")
        return None
    except KeychainProxyConnectionFailure as e:
        tb = traceback.format_exc()
        self.log.error(f"Missing keychain_proxy: {e} {tb}")
        raise  # Re-raise so that the caller can decide whether to continue or abort
    return key
async def get_private_key(self, fingerprint: Optional[int]) -> Optional[PrivateKey]:
    """
    Attempt to get the private key for the given fingerprint. If the fingerprint is None,
    get_key_for_fingerprint() will return the first private key. Similarly, if a key isn't
    returned for the provided fingerprint, the first key will be returned.
    """
    key = await self.get_key_for_fingerprint(fingerprint)
    if key is not None or fingerprint is None:
        return key
    # A specific fingerprint was requested but not found: fall back to the first key.
    fallback = await self.get_key_for_fingerprint(None)
    if fallback is not None:
        self.log.info(f"Using first key found (fingerprint: {fallback.get_g1().get_fingerprint()})")
    return fallback
async def _start(self) -> None:
    """Start the wallet node using the last-used (or first available) key."""
    await self._start_with_fingerprint()
async def _start_with_fingerprint(
    self,
    fingerprint: Optional[int] = None,
) -> bool:
    """Boot the wallet for `fingerprint` (falling back to the last-used key).

    Returns False when no usable private key exists; True on success.
    Creates the wallet state manager, validates any stored weight proof,
    and spawns the subscription/retry background tasks.
    """
    # Makes sure the coin_state_updates get higher priority than new_peak messages.
    # Delayed instantiation until here to avoid errors.
    # got Future <Future pending> attached to a different loop
    self._new_peak_queue = NewPeakQueue(inner_queue=asyncio.PriorityQueue())
    multiprocessing_start_method = process_config_start_method(config=self.config, log=self.log)
    multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
    self._weight_proof_handler = WalletWeightProofHandler(self.constants, multiprocessing_context)
    self.synced_peers = set()
    private_key = await self.get_private_key(fingerprint or self.get_last_used_fingerprint())
    if private_key is None:
        self.log_out()
        return False
    if self.config.get("enable_profiler", False):
        if sys.getprofile() is not None:
            self.log.warning("not enabling profiler, getprofile() is already set")
        else:
            asyncio.create_task(profile_task(self.root_path, "wallet", self.log))
    if self.config.get("enable_memory_profiler", False):
        asyncio.create_task(mem_profile_task(self.root_path, "wallet", self.log))
    # The DB file is per-key: the fingerprint is part of the filename.
    path: Path = get_wallet_db_path(self.root_path, self.config, str(private_key.get_g1().get_fingerprint()))
    path.parent.mkdir(parents=True, exist_ok=True)
    self._wallet_state_manager = await WalletStateManager.create(
        private_key,
        self.config,
        path,
        self.constants,
        self.server,
        self.root_path,
        self,
    )
    assert self._wallet_state_manager is not None
    if self._wallet_state_manager.blockchain.synced_weight_proof is not None:
        # Re-validate the persisted weight proof before trusting its records.
        weight_proof = self._wallet_state_manager.blockchain.synced_weight_proof
        success, _, records = await self._weight_proof_handler.validate_weight_proof(weight_proof, True)
        assert success is True and records is not None and len(records) > 1
        await self._wallet_state_manager.blockchain.new_valid_weight_proof(weight_proof, records)
    if self.wallet_peers is None:
        self.initialize_wallet_peers()
    if self.state_changed_callback is not None:
        self.wallet_state_manager.set_callback(self.state_changed_callback)
    self.last_wallet_tx_resend_time = int(time.time())
    self.last_state_retry_time = int(time.time())
    self.wallet_tx_resend_timeout_secs = self.config.get("tx_resend_timeout_secs", 60 * 60)
    self.wallet_state_manager.set_pending_callback(self._pending_tx_handler)
    self._shut_down = False
    self._process_new_subscriptions_task = asyncio.create_task(self._process_new_subscriptions())
    self._retry_failed_states_task = asyncio.create_task(self._retry_failed_states())
    self.sync_event = asyncio.Event()
    self.log_in(private_key)
    self.wallet_state_manager.set_sync_mode(False)
    async with self.wallet_state_manager.puzzle_store.lock:
        # Pre-derive puzzle hashes so incoming state can be matched immediately.
        index = await self.wallet_state_manager.puzzle_store.get_last_derivation_path()
        if index is None or index < self.wallet_state_manager.initial_num_public_keys - 1:
            await self.wallet_state_manager.create_more_puzzle_hashes(from_zero=True)
    self.wsm_close_task = None
    return True
def _close(self):
    """Begin shutdown: log out, set the shutdown flag, cancel background tasks."""
    self.log.info("self._close")
    self.log_out()
    self._shut_down = True
    for task in (
        self._process_new_subscriptions_task,
        self._retry_failed_states_task,
        self._primary_peer_sync_task,
        self._secondary_peer_sync_task,
    ):
        if task is not None:
            task.cancel()
async def _await_closed(self, shutting_down: bool = True):
    """Finish the shutdown begun by _close(): drain connections, stores and proxies.

    shutting_down=False keeps the keychain proxy alive (used when switching
    keys rather than exiting the process).
    """
    self.log.info("self._await_closed")
    if self._server is not None:
        await self.server.close_all_connections()
    if self._weight_proof_handler is not None:
        self._weight_proof_handler.cancel_weight_proof_tasks()
    if self.wallet_peers is not None:
        await self.wallet_peers.ensure_is_closed()
    if self._wallet_state_manager is not None:
        await self.wallet_state_manager._await_closed()
        self._wallet_state_manager = None
    if shutting_down and self._keychain_proxy is not None:
        # Clear the reference before awaiting close so nothing reuses it mid-teardown.
        proxy = self._keychain_proxy
        self._keychain_proxy = None
        await proxy.close()
        await asyncio.sleep(0.5)  # https://docs.aiohttp.org/en/stable/client_advanced.html#graceful-shutdown
    self.wallet_peers = None
def _set_state_changed_callback(self, callback: Callable):
    """Register the UI state-change callback, forwarding it to the WSM if present."""
    self.state_changed_callback = callback
    if self._wallet_state_manager is None:
        return
    self.wallet_state_manager.set_callback(self.state_changed_callback)
    self.wallet_state_manager.set_pending_callback(self._pending_tx_handler)
def _pending_tx_handler(self):
    """Kick off an async resend of pending transactions (no-op before startup)."""
    if self._wallet_state_manager is not None:
        asyncio.create_task(self._resend_queue())
async def _resend_queue(self):
    """Resend every pending transaction to full nodes that have not accepted it yet."""

    def halted() -> bool:
        # Re-checked between sends: shutdown can happen mid-loop.
        return self._shut_down or self._server is None or self._wallet_state_manager is None

    if halted():
        return None
    for msg, sent_peers in await self._messages_to_resend():
        if halted():
            return None
        for peer in self.server.get_connections(NodeType.FULL_NODE):
            if peer.peer_node_id in sent_peers:
                continue
            self.log.debug(f"sending: {msg}")
            await peer.send_message(msg)
async def _messages_to_resend(self) -> List[Tuple[Message, Set[bytes32]]]:
    """Build send_transaction messages for txs not yet accepted into a mempool.

    Returns (message, peers_already_successful) pairs so callers can skip
    peers that already accepted the spend. Once per
    wallet_tx_resend_timeout_secs, previously-accepted txs are retried too.
    """
    if self._wallet_state_manager is None or self._shut_down:
        return []
    messages: List[Tuple[Message, Set[bytes32]]] = []
    current_time = int(time.time())
    retry_accepted_txs = False
    if self.last_wallet_tx_resend_time < current_time - self.wallet_tx_resend_timeout_secs:
        self.last_wallet_tx_resend_time = current_time
        retry_accepted_txs = True
    records: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_not_sent(
        include_accepted_txs=retry_accepted_txs
    )
    for record in records:
        if record.spend_bundle is None:
            continue
        msg = make_msg(
            ProtocolMessageTypes.send_transaction,
            wallet_protocol.SendTransaction(record.spend_bundle),
        )
        # Peers that already returned SUCCESS don't need the tx again.
        already_sent = set()
        for peer, status, _ in record.sent_to:
            if status == MempoolInclusionStatus.SUCCESS.value:
                already_sent.add(bytes32.from_hexstr(peer))
        messages.append((msg, already_sent))
    return messages
async def _retry_failed_states(self) -> None:
    """Background task: periodically re-apply coin states that failed to be added.

    Runs until _close() sets _shut_down. Roughly every 10 seconds it pulls
    failed states from the retry store and replays them against the peer
    they came from (or any connected full node if that peer is gone).
    """
    while not self._shut_down:
        try:
            await asyncio.sleep(5)
            current_time = time.time()
            if self.last_state_retry_time < current_time - 10:
                self.last_state_retry_time = current_time
                # Bug fix: the `wallet_state_manager` property raises RuntimeError
                # when unassigned (it never returns None), so the original
                # `self.wallet_state_manager is None` check could never be true
                # and the raised error killed this task during startup/shutdown.
                # Check the backing attribute instead.
                if self._wallet_state_manager is None:
                    continue
                states_to_retry = await self.wallet_state_manager.retry_store.get_all_states_to_retry()
                for state, peer_id, fork_height in states_to_retry:
                    matching_peer = tuple(
                        p for p in self.server.get_connections(NodeType.FULL_NODE) if p.peer_node_id == peer_id
                    )
                    if len(matching_peer) == 0:
                        peer = self.get_full_node_peer()
                        if peer is None:
                            self.log.info(f"disconnected from all peers, cannot retry state: {state}")
                            continue
                        else:
                            self.log.info(
                                f"disconnected from peer {peer_id}, state will retry with {peer.peer_node_id}"
                            )
                    else:
                        peer = matching_peer[0]
                    async with self.wallet_state_manager.db_wrapper.writer():
                        self.log.info(f"retrying coin_state: {state}")
                        try:
                            # fork_height == 0 is the "unknown" sentinel -> pass None.
                            await self.wallet_state_manager.new_coin_state(
                                [state], peer, None if fork_height == 0 else fork_height
                            )
                        except Exception as e:
                            self.log.exception(f"Exception while adding states.. : {e}")
                        else:
                            await self.wallet_state_manager.blockchain.clean_block_records()
        except asyncio.CancelledError:
            self.log.info("Retry task cancelled, exiting.")
            raise
        except Exception:
            # Robustness fix: an unexpected error previously terminated this
            # background task permanently; log it and keep the loop alive.
            self.log.exception("Exception in retry task; continuing.")
async def _process_new_subscriptions(self):
    """Background task: drain the new-peak priority queue until shutdown."""
    while not self._shut_down:
        # Here we process four types of messages in the queue, where the first one has higher priority (lower
        # number in the queue), and priority decreases for each type.
        peer: Optional[WSFlaxConnection] = None
        item: Optional[NewPeakItem] = None
        try:
            peer, item = None, None
            item = await self.new_peak_queue.get()
            self.log.debug("Pulled from queue: %s", item)
            assert item is not None
            if item.item_type == NewPeakQueueTypes.COIN_ID_SUBSCRIPTION:
                # Subscriptions are the highest priority, because we don't want to process any more peaks or
                # state updates until we are sure that we subscribed to everything that we need to. Otherwise,
                # we might not be able to process some state.
                coin_ids: List[bytes32] = item.data
                for peer in self.server.get_connections(NodeType.FULL_NODE):
                    coin_states: List[CoinState] = await subscribe_to_coin_updates(coin_ids, peer, uint32(0))
                    if len(coin_states) > 0:
                        async with self.wallet_state_manager.lock:
                            await self.receive_state_from_peer(coin_states, peer)
            elif item.item_type == NewPeakQueueTypes.PUZZLE_HASH_SUBSCRIPTION:
                puzzle_hashes: List[bytes32] = item.data
                for peer in self.server.get_connections(NodeType.FULL_NODE):
                    # Puzzle hash subscription
                    coin_states: List[CoinState] = await subscribe_to_phs(puzzle_hashes, peer, uint32(0))
                    if len(coin_states) > 0:
                        async with self.wallet_state_manager.lock:
                            await self.receive_state_from_peer(coin_states, peer)
            elif item.item_type == NewPeakQueueTypes.FULL_NODE_STATE_UPDATED:
                # Note: this can take a while when we have a lot of transactions. We want to process these
                # before new_peaks, since new_peak_wallet requires that we first obtain the state for that peak.
                request: wallet_protocol.CoinStateUpdate = item.data[0]
                peer = item.data[1]
                assert peer is not None
                await self.state_update_received(request, peer)
            elif item.item_type == NewPeakQueueTypes.NEW_PEAK_WALLET:
                # This can take a VERY long time, because it might trigger a long sync. It is OK if we miss some
                # subscriptions or state updates, since all subscriptions and state updates will be handled by
                # long_sync (up to the target height).
                request: wallet_protocol.NewPeakWallet = item.data[0]
                peer = item.data[1]
                assert peer is not None
                await self.new_peak_wallet(request, peer)
            else:
                # Unknown queue item type: programming error.
                assert False
        except asyncio.CancelledError:
            self.log.info("Queue task cancelled, exiting.")
            raise
        except Exception as e:
            self.log.error(f"Exception handling {item}, {e} {traceback.format_exc()}")
            # Drop the peer that caused the failure so it can reconnect cleanly.
            if peer is not None:
                await peer.close(9999)
def log_in(self, sk: PrivateKey):
    """Mark the node as logged in with `sk` and persist its fingerprint (best effort)."""
    fingerprint = sk.get_g1().get_fingerprint()
    self.logged_in_fingerprint = fingerprint
    self.logged_in = True
    self.log.info(f"Wallet is logged in using key with fingerprint: {self.logged_in_fingerprint}")
    try:
        self.update_last_used_fingerprint()
    except Exception:
        self.log.exception("Non-fatal: Unable to update last used fingerprint.")
def log_out(self):
    """Forget the current key: clear the login flag and fingerprint."""
    self.logged_in = False
    self.logged_in_fingerprint = None
def update_last_used_fingerprint(self) -> None:
    """Write the current fingerprint to the last-used-fingerprint file."""
    fingerprint = self.logged_in_fingerprint
    assert fingerprint is not None
    target = self.get_last_used_fingerprint_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(str(fingerprint))
    self.log.info(f"Updated last used fingerprint: {fingerprint}")
def get_last_used_fingerprint(self) -> Optional[int]:
    """Read the last-used fingerprint file; None if missing or unreadable."""
    try:
        path = self.get_last_used_fingerprint_path()
        if not path.exists():
            return None
        return int(path.read_text().strip())
    except Exception:
        self.log.exception("Non-fatal: Unable to read last used fingerprint.")
        return None
def get_last_used_fingerprint_path(self) -> Path:
    """Path of the file recording the most recently used key fingerprint."""
    db_dir = path_from_root(self.root_path, self.config["database_path"]).parent
    return db_dir / "last_used_fingerprint"
def set_server(self, server: FlaxServer):
    """Attach the network server and (re)initialize outbound peer discovery."""
    self._server = server
    self.initialize_wallet_peers()
def initialize_wallet_peers(self):
    """Set up WalletPeers-based peer discovery unless disabled or running tests."""
    self.server.on_connect = self.on_connect
    network_name = self.config["selected_network"]
    connect_to_unknown_peers = self.config.get("connect_to_unknown_peers", True)
    testing = self.config.get("testing", False)
    if self.wallet_peers is None and connect_to_unknown_peers and not testing:
        # NOTE: WalletPeers takes positional args — keep the order intact.
        self.wallet_peers = WalletPeers(
            self.server,
            self.config["target_peer_count"],
            PeerStoreResolver(
                self.root_path,
                self.config,
                selected_network=network_name,
                peers_file_path_key="wallet_peers_file_path",
                legacy_peer_db_path_key=WALLET_PEERS_PATH_KEY_DEPRECATED,
                default_peers_file_path="wallet/db/wallet_peers.dat",
            ),
            self.config["introducer_peer"],
            self.config.get("dns_servers", ["dns-introducer.flaxnetwork.org"]),
            self.config["peer_connect_interval"],
            network_name,
            None,
            self.log,
        )
        asyncio.create_task(self.wallet_peers.start())
def on_disconnect(self, peer: WSFlaxConnection):
    """Forget per-peer state when a connection drops."""
    if self.is_trusted(peer):
        # Our trusted local node is gone; allow untrusted peers again.
        self.local_node_synced = False
        self.initialize_wallet_peers()
    self.untrusted_caches.pop(peer.peer_node_id, None)
    self.synced_peers.discard(peer.peer_node_id)
    self.node_peaks.pop(peer.peer_node_id, None)
async def on_connect(self, peer: WSFlaxConnection):
    """Handle a new full-node connection: version-gate it, then resend pending txs."""
    if self._wallet_state_manager is None:
        return None
    if Version(peer.protocol_version) < Version("0.0.33"):
        self.log.info("Disconnecting, full node running old software")
        await peer.close()
        # NOTE(review): execution continues past close() — the sends below
        # will target a closed peer; confirm whether an early return was intended.
    trusted = self.is_trusted(peer)
    if not trusted and self.local_node_synced:
        # Already synced against the trusted local node; skip untrusted peers.
        await peer.close()
    if peer.peer_node_id in self.synced_peers:
        self.synced_peers.remove(peer.peer_node_id)
    self.log.info(f"Connected peer {peer.get_peer_info()} is trusted: {trusted}")
    messages_peer_ids = await self._messages_to_resend()
    self.wallet_state_manager.state_changed("add_connection")
    for msg, peer_ids in messages_peer_ids:
        if peer.peer_node_id in peer_ids:
            # This peer already accepted the transaction.
            continue
        await peer.send_message(msg)
    if self.wallet_peers is not None:
        await self.wallet_peers.on_connect(peer)
async def perform_atomic_rollback(self, fork_height: int, cache: Optional[PeerRequestCache] = None):
    """Roll wallet state back to `fork_height` inside a single DB transaction.

    On success, also prunes block records and removes wallets that no longer
    exist after the rollback; on failure the transaction is aborted and the
    exception re-raised.
    """
    self.log.info(f"perform_atomic_rollback to {fork_height}")
    # this is to start a write transaction
    async with self.wallet_state_manager.db_wrapper.writer():
        try:
            removed_wallet_ids = await self.wallet_state_manager.reorg_rollback(fork_height)
            await self.wallet_state_manager.blockchain.set_finished_sync_up_to(fork_height, in_rollback=True)
            if cache is None:
                # No specific peer cache: clear all of them.
                self.rollback_request_caches(fork_height)
            else:
                cache.clear_after_height(fork_height)
        except Exception as e:
            tb = traceback.format_exc()
            self.log.error(f"Exception while perform_atomic_rollback: {e} {tb}")
            raise
        else:
            await self.wallet_state_manager.blockchain.clean_block_records()
            for wallet_id in removed_wallet_ids:
                self.wallet_state_manager.wallets.pop(wallet_id)
    # this has to be called *after* the transaction commits, otherwise it
    # won't see the changes (since we spawn a new task to handle potential
    # resends)
    self._pending_tx_handler()
async def long_sync(
self,
target_height: uint32,
full_node: WSFlaxConnection,
fork_height: int,
*,
rollback: bool,
):
"""
Sync algorithm:
- Download and verify weight proof (if not trusted)
- Roll back anything after the fork point (if rollback=True)
- Subscribe to all puzzle_hashes over and over until there are no more updates
- Subscribe to all coin_ids over and over until there are no more updates
- rollback=False means that we are just double-checking with this peer to make sure we don't have any
missing transactions, so we don't need to rollback
"""
def is_new_state_update(cs: CoinState) -> bool:
if cs.spent_height is None and cs.created_height is None:
return True
if cs.spent_height is not None and cs.spent_height >= fork_height:
return True
if cs.created_height is not None and cs.created_height >= fork_height:
return True
return False
trusted: bool = self.is_trusted(full_node)
self.log.info(f"Starting sync trusted: {trusted} to peer {full_node.peer_host}")
start_time = time.time()
if rollback:
# we should clear all peers since this is a full rollback
await self.perform_atomic_rollback(fork_height)
await self.update_ui()
# We only process new state updates to avoid slow reprocessing. We set the sync height after adding
# Things, so we don't have to reprocess these later. There can be many things in ph_update_res.
already_checked_ph: Set[bytes32] = set()
continue_while: bool = True
all_puzzle_hashes: List[bytes32] = await self.get_puzzle_hashes_to_subscribe()
while continue_while:
# Get all phs from puzzle store
ph_chunks: Iterator[List[bytes32]] = chunks(all_puzzle_hashes, 1000)
for chunk in ph_chunks:
ph_update_res: List[CoinState] = await subscribe_to_phs(
[p for p in chunk if p not in already_checked_ph], full_node, 0
)
ph_update_res = list(filter(is_new_state_update, ph_update_res))
if not await self.receive_state_from_peer(ph_update_res, full_node, update_finished_height=True):
# If something goes wrong, abort sync
return
already_checked_ph.update(chunk)
# Check if new puzzle hashed have been created
await self.wallet_state_manager.create_more_puzzle_hashes()
all_puzzle_hashes = await self.get_puzzle_hashes_to_subscribe()
continue_while = False
for ph in all_puzzle_hashes:
if ph not in already_checked_ph:
continue_while = True
break
self.log.info(f"Successfully subscribed and updated {len(already_checked_ph)} puzzle hashes")
# The number of coin id updates are usually going to be significantly less than ph updates, so we can
# sync from 0 every time.
continue_while = True
all_coin_ids: List[bytes32] = await self.get_coin_ids_to_subscribe(0)
already_checked_coin_ids: Set[bytes32] = set()
while continue_while:
one_k_chunks = chunks(all_coin_ids, 1000)
for chunk in one_k_chunks:
c_update_res: List[CoinState] = await subscribe_to_coin_updates(chunk, full_node, 0)
if not await self.receive_state_from_peer(c_update_res, full_node):
# If something goes wrong, abort sync
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/transaction_sorting.py | flax/wallet/transaction_sorting.py | from __future__ import annotations
import enum
class SortKey(enum.Enum):
    """SQL ``ORDER BY`` templates used when sorting wallet transaction queries.

    Each member's value is a format string with ``{ASC}``/``{DESC}``
    placeholders; ``ascending()``/``descending()`` fill in the concrete sort
    directions (descending simply flips both placeholders).
    """

    CONFIRMED_AT_HEIGHT = "ORDER BY confirmed_at_height {ASC}"
    RELEVANCE = "ORDER BY confirmed {ASC}, confirmed_at_height {DESC}, created_at_time {DESC}"

    def _render(self, primary: str, flipped: str) -> str:
        # Substitute the two direction placeholders in this member's template.
        return self.value.format(ASC=primary, DESC=flipped)

    def ascending(self) -> str:
        return self._render("ASC", "DESC")

    def descending(self) -> str:
        return self._render("DESC", "ASC")
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_node_api.py | flax/wallet/wallet_node_api.py | from flax.protocols import full_node_protocol, introducer_protocol, wallet_protocol
from flax.server.outbound_message import NodeType
from flax.server.ws_connection import WSFlaxConnection
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.util.api_decorators import api_request, peer_required, execute_task
from flax.util.errors import Err
from flax.wallet.wallet_node import WalletNode
class WalletNodeAPI:
    """
    Message handlers for everything a wallet node receives from full nodes and
    introducers. Many handlers are deliberate no-ops: the wallet requests that
    data explicitly elsewhere and ignores unsolicited responses here.
    """

    # The wallet node this API dispatches into.
    wallet_node: WalletNode

    def __init__(self, wallet_node) -> None:
        self.wallet_node = wallet_node

    @property
    def log(self):
        # Share the wallet node's logger so all wallet messages go to one log.
        return self.wallet_node.log

    @property
    def api_ready(self):
        # The API only accepts messages once a key is logged in.
        return self.wallet_node.logged_in

    @peer_required
    @api_request
    async def respond_removals(self, response: wallet_protocol.RespondRemovals, peer: WSFlaxConnection):
        # No-op: removals are fetched and handled explicitly elsewhere.
        pass

    # NOTE(review): unlike the surrounding handlers this method has no
    # @api_request/@peer_required decorators, so it is not registered as a
    # message handler — confirm whether this is intentional dead code.
    async def reject_removals_request(self, response: wallet_protocol.RejectRemovalsRequest, peer: WSFlaxConnection):
        """
        The full node has rejected our request for removals.
        """
        pass

    @api_request
    async def reject_additions_request(self, response: wallet_protocol.RejectAdditionsRequest):
        """
        The full node has rejected our request for additions.
        """
        pass

    @execute_task
    @peer_required
    @api_request
    async def new_peak_wallet(self, peak: wallet_protocol.NewPeakWallet, peer: WSFlaxConnection):
        """
        The full node sent us a new peak; record it per peer and queue it for
        processing by the new-peak queue.
        """
        self.wallet_node.node_peaks[peer.peer_node_id] = (peak.height, peak.header_hash)
        await self.wallet_node.new_peak_queue.new_peak_wallet(peak, peer)

    @api_request
    async def reject_header_request(self, response: wallet_protocol.RejectHeaderRequest):
        """
        The full node has rejected our request for a header.
        """
        pass

    @api_request
    async def respond_block_header(self, response: wallet_protocol.RespondBlockHeader):
        # No-op: header responses are awaited directly by the requester.
        pass

    @peer_required
    @api_request
    async def respond_additions(self, response: wallet_protocol.RespondAdditions, peer: WSFlaxConnection):
        # No-op: additions are fetched and handled explicitly elsewhere.
        pass

    @api_request
    async def respond_proof_of_weight(self, response: full_node_protocol.RespondProofOfWeight):
        # No-op: weight proofs are awaited directly by the requester.
        pass

    @peer_required
    @api_request
    async def transaction_ack(self, ack: wallet_protocol.TransactionAck, peer: WSFlaxConnection):
        """
        This is an ack for our previous SendTransaction call. This removes the transaction from
        the send queue if we have sent it to enough nodes.
        """
        async with self.wallet_node.wallet_state_manager.lock:
            assert peer.peer_node_id is not None
            name = peer.peer_node_id.hex()
            status = MempoolInclusionStatus(ack.status)
            try:
                wallet_state_manager = self.wallet_node.wallet_state_manager
            except RuntimeError as e:
                # Wallet state manager may not be assigned yet (e.g. during startup).
                if "not assigned" in str(e):
                    return None
                raise
            if status == MempoolInclusionStatus.SUCCESS:
                self.wallet_node.log.info(
                    f"SpendBundle has been received and accepted to mempool by the FullNode. {ack}"
                )
            elif status == MempoolInclusionStatus.PENDING:
                self.wallet_node.log.info(f"SpendBundle has been received (and is pending) by the FullNode. {ack}")
            else:
                # Rejection: an untrusted, still-syncing peer is dropped rather
                # than counted as a real rejection of the transaction.
                if not self.wallet_node.is_trusted(peer) and ack.error == Err.NO_TRANSACTIONS_WHILE_SYNCING.name:
                    self.wallet_node.log.info(f"Peer {peer.get_peer_info()} is not synced, closing connection")
                    await peer.close()
                    return
                self.wallet_node.log.warning(f"SpendBundle has been rejected by the FullNode. {ack}")
            if ack.error is not None:
                await wallet_state_manager.remove_from_queue(ack.txid, name, status, Err[ack.error])
            else:
                await wallet_state_manager.remove_from_queue(ack.txid, name, status, None)

    @peer_required
    @api_request
    async def respond_peers_introducer(
        self, request: introducer_protocol.RespondPeersIntroducer, peer: WSFlaxConnection
    ):
        """Receive a peer list from an introducer, then drop the connection."""
        if self.wallet_node.wallet_peers is not None:
            await self.wallet_node.wallet_peers.respond_peers(request, peer.get_peer_info(), False)
        # The introducer has served its purpose once peers are delivered.
        if peer is not None and peer.connection_type is NodeType.INTRODUCER:
            await peer.close()

    @peer_required
    @api_request
    async def respond_peers(self, request: full_node_protocol.RespondPeers, peer: WSFlaxConnection):
        """Receive a peer list gossiped by a full node."""
        if self.wallet_node.wallet_peers is None:
            return None
        self.log.info(f"Wallet received {len(request.peer_list)} peers.")
        await self.wallet_node.wallet_peers.respond_peers(request, peer.get_peer_info(), True)
        return None

    @api_request
    async def respond_puzzle_solution(self, request: wallet_protocol.RespondPuzzleSolution):
        # Unsolicited arrival here means the direct request already timed out.
        self.log.error("Unexpected message `respond_puzzle_solution`. Peer might be slow to respond")
        return None

    @api_request
    async def reject_puzzle_solution(self, request: wallet_protocol.RejectPuzzleSolution):
        self.log.warning(f"Reject puzzle solution: {request}")

    @api_request
    async def respond_header_blocks(self, request: wallet_protocol.RespondHeaderBlocks):
        # No-op: header blocks are awaited directly by the requester.
        pass

    @api_request
    async def respond_block_headers(self, request: wallet_protocol.RespondBlockHeaders):
        # No-op: block headers are awaited directly by the requester.
        pass

    @api_request
    async def reject_header_blocks(self, request: wallet_protocol.RejectHeaderBlocks):
        self.log.warning(f"Reject header blocks: {request}")

    @api_request
    async def reject_block_headers(self, request: wallet_protocol.RejectBlockHeaders):
        pass

    @execute_task
    @peer_required
    @api_request
    async def coin_state_update(self, request: wallet_protocol.CoinStateUpdate, peer: WSFlaxConnection):
        """A subscribed coin/puzzle-hash changed state; queue it for processing."""
        await self.wallet_node.new_peak_queue.full_node_state_updated(request, peer)

    @api_request
    async def respond_to_ph_update(self, request: wallet_protocol.RespondToPhUpdates):
        # No-op: puzzle-hash subscription responses are awaited directly.
        pass

    @api_request
    async def respond_to_coin_update(self, request: wallet_protocol.RespondToCoinUpdates):
        # No-op: coin subscription responses are awaited directly.
        pass

    @api_request
    async def respond_children(self, request: wallet_protocol.RespondChildren):
        # No-op: children responses are awaited directly.
        pass

    @api_request
    async def respond_ses_hashes(self, request: wallet_protocol.RespondSESInfo):
        # No-op: sub-epoch-summary responses are awaited directly.
        pass

    @api_request
    async def respond_blocks(self, request: full_node_protocol.RespondBlocks) -> None:
        # No-op: block responses are awaited directly.
        pass
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/block_record.py | flax/wallet/block_record.py | from __future__ import annotations
from dataclasses import dataclass
from typing import List
from flax.types.blockchain_format.coin import Coin
from flax.types.header_block import HeaderBlock
from flax.util.streamable import Streamable, streamable
@streamable
@dataclass(frozen=True)
class HeaderBlockRecord(Streamable):
    """
    These are values that are stored in the wallet database, corresponding to information
    that the wallet cares about in each block.

    The properties below are thin pass-throughs to the wrapped ``header``.
    """

    header: HeaderBlock  # full header block that the properties below delegate to
    additions: List[Coin]  # A block record without additions is not finished
    removals: List[Coin]  # A block record without removals is not finished

    @property
    def header_hash(self):
        """Hash of the wrapped header block."""
        return self.header.header_hash

    @property
    def prev_header_hash(self):
        """Header hash of the previous block."""
        return self.header.prev_header_hash

    @property
    def height(self):
        """Block height of the wrapped header block."""
        return self.header.height

    @property
    def transactions_filter(self):
        """Transactions filter carried by the wrapped header block."""
        return self.header.transactions_filter
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_pool_store.py | flax/wallet/wallet_pool_store.py | import logging
from typing import List, Tuple
from flax.types.coin_spend import CoinSpend
from flax.util.db_wrapper import DBWrapper2
from flax.util.ints import uint32
log = logging.getLogger(__name__)
class WalletPoolStore:
    """
    Stores, per wallet, an ordered chain of pool state transition coin spends,
    where each stored spend's coin is the child of the previous one.
    """

    # Shared database access layer.
    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls, wrapper: DBWrapper2):
        """Create the store and ensure its backing table exists."""
        self = cls()
        self.db_wrapper = wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                "CREATE TABLE IF NOT EXISTS pool_state_transitions("
                " transition_index integer,"
                " wallet_id integer,"
                " height bigint,"
                " coin_spend blob,"
                " PRIMARY KEY(transition_index, wallet_id))"
            )
        return self

    async def add_spend(
        self,
        wallet_id: int,
        spend: CoinSpend,
        height: uint32,
    ) -> None:
        """
        Append a new state transition spend for ``wallet_id``.

        The new spend must extend the stored chain: its coin's parent must be
        the coin of the most recent stored spend, and ``height`` must not be
        lower than that spend's height. A spend that is already stored (same
        wallet, height and serialization) is silently ignored.

        Raises:
            ValueError: if ``height`` goes down, or the spend does not extend
                the most recent stored spend.
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            # find the most recent transition in wallet_id
            rows = list(
                await conn.execute_fetchall(
                    "SELECT transition_index, height, coin_spend "
                    "FROM pool_state_transitions "
                    "WHERE wallet_id=? "
                    "ORDER BY transition_index DESC "
                    "LIMIT 1",
                    (wallet_id,),
                )
            )
            serialized_spend = bytes(spend)
            if len(rows) == 0:
                # first transition for this wallet
                transition_index = 0
            else:
                existing = list(
                    await conn.execute_fetchall(
                        "SELECT COUNT(*) "
                        "FROM pool_state_transitions "
                        "WHERE wallet_id=? AND height=? AND coin_spend=?",
                        (wallet_id, height, serialized_spend),
                    )
                )
                if existing[0][0] != 0:
                    # we already have this transition in the DB
                    return
                row = rows[0]
                if height < row[1]:
                    raise ValueError("Height cannot go down")
                prev = CoinSpend.from_bytes(row[2])
                # the new spend must spend the child coin of the previous spend
                if spend.coin.parent_coin_info != prev.coin.name():
                    raise ValueError("New spend does not extend")
                transition_index = row[0]
            cursor = await conn.execute(
                "INSERT OR IGNORE INTO pool_state_transitions VALUES (?, ?, ?, ?)",
                (
                    transition_index + 1,
                    wallet_id,
                    height,
                    serialized_spend,
                ),
            )
            await cursor.close()

    async def get_spends_for_wallet(self, wallet_id: int) -> List[Tuple[uint32, CoinSpend]]:
        """
        Retrieves all (height, spend) entries for a wallet ID, ordered oldest
        to newest by transition index.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT height, coin_spend FROM pool_state_transitions WHERE wallet_id=? ORDER BY transition_index",
                (wallet_id,),
            )
        return [(uint32(row[0]), CoinSpend.from_bytes(row[1])) for row in rows]

    async def rollback(self, height: int, wallet_id_arg: int) -> None:
        """
        Delete all entries for ``wallet_id_arg`` whose height is strictly
        greater than ``height``.
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "DELETE FROM pool_state_transitions WHERE height>? AND wallet_id=?", (height, wallet_id_arg)
            )
            await cursor.close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/trade_manager.py | flax/wallet/trade_manager.py | from __future__ import annotations
import dataclasses
import logging
import time
import traceback
from typing import Any, Dict, List, Optional, Set, Tuple, Union
from typing_extensions import Literal
from flax.data_layer.data_layer_wallet import DataLayerWallet
from flax.protocols.wallet_protocol import CoinState
from flax.server.ws_connection import WSFlaxConnection
from flax.types.blockchain_format.coin import Coin, coin_as_list
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.spend_bundle import SpendBundle
from flax.util.db_wrapper import DBWrapper2
from flax.util.hash import std_hash
from flax.util.ints import uint32, uint64
from flax.wallet.db_wallet.db_wallet_puzzles import ACS_MU_PH
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.outer_puzzles import AssetType
from flax.wallet.payment import Payment
from flax.wallet.puzzle_drivers import PuzzleInfo, Solver
from flax.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from flax.wallet.trade_record import TradeRecord
from flax.wallet.trading.offer import NotarizedPayment, Offer
from flax.wallet.trading.trade_status import TradeStatus
from flax.wallet.trading.trade_store import TradeStore
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.transaction_type import TransactionType
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet import Wallet
from flax.wallet.wallet_coin_record import WalletCoinRecord
# Compiled settlement_payments puzzle used by all offers in this module.
OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clvm")
class TradeManager:
"""
This class is a driver for creating and accepting settlement_payments.clvm style offers.
By default, standard XFX is supported but to support other types of assets you must implement certain functions on
the asset's wallet as well as create a driver for its puzzle(s). Here is a guide to integrating a new types of
assets with this trade manager:
Puzzle Drivers:
- See flax/wallet/outer_puzzles.py for a full description of how to build these
- The `solve` method must be able to be solved by a Solver that looks like this:
Solver(
{
"coin": bytes
"parent_spend": bytes
"siblings": List[bytes] # other coins of the same type being offered
"sibling_spends": List[bytes] # The parent spends for the siblings
"sibling_puzzles": List[Program] # The inner puzzles of the siblings (always OFFER_MOD)
"sibling_solutions": List[Program] # The inner solution of the siblings
}
)
Wallet:
- Segments in this code that call general wallet methods are highlighted by comments: # ATTENTION: new wallets
- To be able to be traded, a wallet must implement these methods on itself:
- generate_signed_transaction(...) -> List[TransactionRecord] (See cat_wallet.py for full API)
- convert_puzzle_hash(puzzle_hash: bytes32) -> bytes32 # Converts a puzzlehash from outer to inner puzzle
- get_puzzle_info(asset_id: bytes32) -> PuzzleInfo
- get_coins_to_offer(asset_id: bytes32, amount: uint64) -> Set[Coin]
- If you would like assets from your wallet to be referenced with just a wallet ID, you must also implement:
- get_asset_id() -> bytes32
- Finally, you must make sure that your wallet will respond appropriately when these WSM methods are called:
- get_wallet_for_puzzle_info(puzzle_info: PuzzleInfo) -> <Your wallet>
- create_wallet_for_puzzle_info(..., puzzle_info: PuzzleInfo) -> <Your wallet> (See cat_wallet.py for full API)
- get_wallet_for_asset_id(asset_id: bytes32) -> <Your wallet>
"""
wallet_state_manager: Any
log: logging.Logger
trade_store: TradeStore
@staticmethod
async def create(
wallet_state_manager: Any,
db_wrapper: DBWrapper2,
name: Optional[str] = None,
) -> TradeManager:
self = TradeManager()
if name:
self.log = logging.getLogger(name)
else:
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.trade_store = await TradeStore.create(db_wrapper)
return self
async def get_offers_with_status(self, status: TradeStatus) -> List[TradeRecord]:
records = await self.trade_store.get_trade_record_with_status(status)
return records
async def get_coins_of_interest(
self,
) -> Set[bytes32]:
"""
Returns list of coins we want to check if they are included in filter,
These will include coins that belong to us and coins that that on other side of treade
"""
coin_ids = await self.trade_store.get_coin_ids_of_interest_with_trade_statuses(
trade_statuses=[TradeStatus.PENDING_ACCEPT, TradeStatus.PENDING_CONFIRM, TradeStatus.PENDING_CANCEL]
)
return coin_ids
async def get_trade_by_coin(self, coin: Coin) -> Optional[TradeRecord]:
all_trades = await self.get_all_trades()
for trade in all_trades:
if trade.status == TradeStatus.CANCELLED.value:
continue
if coin in trade.coins_of_interest:
return trade
return None
    async def coins_of_interest_farmed(
        self, coin_state: CoinState, fork_height: Optional[uint32], peer: WSFlaxConnection
    ) -> None:
        """
        If both our coins and other coins in trade got removed that means that trade was successfully executed
        If coins from other side of trade got farmed without ours, that means that trade failed because either someone
        else completed trade or other side of trade canceled the trade by doing a spend.
        If our coins got farmed but coins from other side didn't, we successfully canceled trade by spending inputs.
        """
        self.log.info(f"coins_of_interest_farmed: {coin_state}")
        trade = await self.get_trade_by_coin(coin_state.coin)
        if trade is None:
            self.log.error(f"Coin: {coin_state.coin}, not in any trade")
            return
        if coin_state.spent_height is None:
            # NOTE: only logged — processing deliberately continues below even
            # when the coin has not been spent.
            self.log.error(f"Coin: {coin_state.coin}, has not been spent so trade can remain valid")
        # Then let's filter the offer into coins that WE offered
        offer = Offer.from_bytes(trade.offer)
        primary_coin_ids = [c.name() for c in offer.bundle.removals()]
        # removals present in our own coin store are the coins we contributed
        our_coin_records: List[WalletCoinRecord] = await self.wallet_state_manager.coin_store.get_multiple_coin_records(
            primary_coin_ids
        )
        our_primary_coins: List[Coin] = [cr.coin for cr in our_coin_records]
        # additions whose root removal is one of our coins are "our" additions
        our_additions: List[Coin] = list(
            filter(lambda c: offer.get_root_removal(c) in our_primary_coins, offer.bundle.additions())
        )
        our_addition_ids: List[bytes32] = [c.name() for c in our_additions]
        # And get all relevant coin states
        coin_states = await self.wallet_state_manager.wallet_node.get_coin_state(
            our_addition_ids,
            peer=peer,
            fork_height=fork_height,
        )
        assert coin_states is not None
        coin_state_names: List[bytes32] = [cs.coin.name() for cs in coin_states]
        # If any of our settlement_payments were spent, this offer was a success!
        if set(our_addition_ids) == set(coin_state_names):
            height = coin_states[0].created_height
            await self.trade_store.set_status(trade.trade_id, TradeStatus.CONFIRMED, height)
            tx_records: List[TransactionRecord] = await self.calculate_tx_records_for_offer(offer, False)
            for tx in tx_records:
                # only trades we created (pending accept) record their txs now
                if TradeStatus(trade.status) == TradeStatus.PENDING_ACCEPT:
                    await self.wallet_state_manager.add_transaction(
                        dataclasses.replace(tx, confirmed_at_height=height, confirmed=True)
                    )
            self.log.info(f"Trade with id: {trade.trade_id} confirmed at height: {height}")
        else:
            # In any other scenario this trade failed
            await self.wallet_state_manager.delete_trade_transactions(trade.trade_id)
            if trade.status == TradeStatus.PENDING_CANCEL.value:
                await self.trade_store.set_status(trade.trade_id, TradeStatus.CANCELLED)
                self.log.info(f"Trade with id: {trade.trade_id} canceled")
            elif trade.status == TradeStatus.PENDING_CONFIRM.value:
                await self.trade_store.set_status(trade.trade_id, TradeStatus.FAILED)
                self.log.warning(f"Trade with id: {trade.trade_id} failed")
async def get_locked_coins(self, wallet_id: Optional[int] = None) -> Dict[bytes32, WalletCoinRecord]:
"""Returns a dictionary of confirmed coins that are locked by a trade."""
all_pending = []
pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT)
pending_confirm = await self.get_offers_with_status(TradeStatus.PENDING_CONFIRM)
pending_cancel = await self.get_offers_with_status(TradeStatus.PENDING_CANCEL)
all_pending.extend(pending_accept)
all_pending.extend(pending_confirm)
all_pending.extend(pending_cancel)
coins_of_interest = []
for trade_offer in all_pending:
coins_of_interest.extend([c.name() for c in trade_offer.coins_of_interest])
result = {}
coin_records = await self.wallet_state_manager.coin_store.get_multiple_coin_records(coins_of_interest)
for record in coin_records:
if wallet_id is None or record.wallet_id == wallet_id:
result[record.name()] = record
return result
async def get_all_trades(self) -> List[TradeRecord]:
all: List[TradeRecord] = await self.trade_store.get_all_trades()
return all
async def get_trade_by_id(self, trade_id: bytes32) -> Optional[TradeRecord]:
record = await self.trade_store.get_trade_record(trade_id)
return record
async def cancel_pending_offer(self, trade_id: bytes32) -> None:
await self.trade_store.set_status(trade_id, TradeStatus.CANCELLED)
self.wallet_state_manager.state_changed("offer_cancelled")
    async def cancel_pending_offer_safely(
        self, trade_id: bytes32, fee: uint64 = uint64(0)
    ) -> Optional[List[TransactionRecord]]:
        """This will create a transaction that includes coins that were offered

        Cancels on-chain by spending the offered coins back to ourselves, so
        the offer can no longer be taken. Returns the created transactions, or
        None when the trade id is unknown.
        """
        self.log.info(f"Secure-Cancel pending offer with id trade_id {trade_id.hex()}")
        trade = await self.trade_store.get_trade_record(trade_id)
        if trade is None:
            return None
        all_txs: List[TransactionRecord] = []
        # the full fee is paid by the first spend only; reset to 0 afterwards
        fee_to_pay: uint64 = fee
        for coin in Offer.from_bytes(trade.offer).get_cancellation_coins():
            wallet = await self.wallet_state_manager.get_wallet_for_coin(coin.name())
            if wallet is None:
                continue
            if wallet.type() == WalletType.NFT:
                # NFTs are sent back to a puzzle hash of the standard wallet
                new_ph = await wallet.wallet_state_manager.main_wallet.get_new_puzzlehash()
            else:
                new_ph = await wallet.get_new_puzzlehash()
            # This should probably not switch on whether or not we're spending a XFX but it has to for now
            if wallet.type() == WalletType.STANDARD_WALLET:
                if fee_to_pay > coin.amount:
                    # coin alone cannot cover the fee; select extra coins
                    selected_coins: Set[Coin] = await wallet.select_coins(
                        uint64(fee_to_pay - coin.amount),
                        exclude=[coin],
                    )
                    selected_coins.add(coin)
                else:
                    selected_coins = {coin}
                tx = await wallet.generate_signed_transaction(
                    uint64(sum([c.amount for c in selected_coins]) - fee_to_pay),
                    new_ph,
                    fee=fee_to_pay,
                    coins=selected_coins,
                    ignore_max_send_amount=True,
                )
                all_txs.append(tx)
            else:
                # ATTENTION: new_wallets
                txs = await wallet.generate_signed_transaction(
                    [coin.amount], [new_ph], fee=fee_to_pay, coins={coin}, ignore_max_send_amount=True
                )
                all_txs.extend(txs)
            fee_to_pay = uint64(0)
            # record the expected self-payment as an incoming transaction
            cancellation_addition = Coin(coin.name(), new_ph, coin.amount)
            all_txs.append(
                TransactionRecord(
                    confirmed_at_height=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=new_ph,
                    amount=uint64(coin.amount),
                    fee_amount=fee,
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=None,
                    additions=[cancellation_addition],
                    removals=[coin],
                    wallet_id=wallet.id(),
                    sent_to=[],
                    trade_id=None,
                    type=uint32(TransactionType.INCOMING_TX.value),
                    name=cancellation_addition.name(),
                    memos=[],
                )
            )
        for tx in all_txs:
            await self.wallet_state_manager.add_pending_transaction(tx_record=dataclasses.replace(tx, fee_amount=fee))
        await self.trade_store.set_status(trade_id, TradeStatus.PENDING_CANCEL)
        return all_txs
    async def cancel_pending_offers(
        self, trades: List[TradeRecord], fee: uint64 = uint64(0), secure: bool = True
    ) -> Optional[List[TransactionRecord]]:
        """This will create a transaction that includes coins that were offered

        Batch variant: when ``secure`` is True the offered coins are spent back
        to ourselves (on-chain cancel); otherwise the trades are only marked
        cancelled locally. All spend bundles are aggregated onto the first
        returned transaction.

        NOTE(review): this iterates ``get_primary_coins()`` while the
        single-offer variant uses ``get_cancellation_coins()`` — confirm the
        difference is intentional.
        """
        all_txs: List[TransactionRecord] = []
        bundles: List[SpendBundle] = []
        # the full fee is paid by the first spend only; reset to 0 afterwards
        fee_to_pay: uint64 = fee
        for trade in trades:
            if trade is None:
                self.log.error("Cannot find offer, skip cancellation.")
                continue
            for coin in Offer.from_bytes(trade.offer).get_primary_coins():
                wallet = await self.wallet_state_manager.get_wallet_for_coin(coin.name())
                if wallet is None:
                    self.log.error(f"Cannot find wallet for offer {trade.trade_id}, skip cancellation.")
                    continue
                if wallet.type() == WalletType.NFT:
                    # NFTs are sent back to a puzzle hash of the standard wallet
                    new_ph = await wallet.wallet_state_manager.main_wallet.get_new_puzzlehash()
                else:
                    new_ph = await wallet.get_new_puzzlehash()
                # This should probably not switch on whether or not we're spending a XFX but it has to for now
                if wallet.type() == WalletType.STANDARD_WALLET:
                    if fee_to_pay > coin.amount:
                        # coin alone cannot cover the fee; select extra coins
                        selected_coins: Set[Coin] = await wallet.select_coins(
                            uint64(fee_to_pay - coin.amount),
                            exclude=[coin],
                        )
                        selected_coins.add(coin)
                    else:
                        selected_coins = {coin}
                    tx: TransactionRecord = await wallet.generate_signed_transaction(
                        uint64(sum([c.amount for c in selected_coins]) - fee_to_pay),
                        new_ph,
                        fee=fee_to_pay,
                        coins=selected_coins,
                        ignore_max_send_amount=True,
                    )
                    # bundles are collected separately and re-attached below
                    if tx is not None and tx.spend_bundle is not None:
                        bundles.append(tx.spend_bundle)
                        all_txs.append(dataclasses.replace(tx, spend_bundle=None))
                else:
                    # ATTENTION: new_wallets
                    txs = await wallet.generate_signed_transaction(
                        [coin.amount], [new_ph], fee=fee_to_pay, coins={coin}, ignore_max_send_amount=True
                    )
                    for tx in txs:
                        if tx is not None and tx.spend_bundle is not None:
                            bundles.append(tx.spend_bundle)
                            all_txs.append(dataclasses.replace(tx, spend_bundle=None))
                fee_to_pay = uint64(0)
                # record the expected self-payment as an incoming transaction
                cancellation_addition = Coin(coin.name(), new_ph, coin.amount)
                all_txs.append(
                    TransactionRecord(
                        confirmed_at_height=uint32(0),
                        created_at_time=uint64(int(time.time())),
                        to_puzzle_hash=new_ph,
                        amount=uint64(coin.amount),
                        fee_amount=fee,
                        confirmed=False,
                        sent=uint32(10),
                        spend_bundle=None,
                        additions=[cancellation_addition],
                        removals=[coin],
                        wallet_id=wallet.id(),
                        sent_to=[],
                        trade_id=None,
                        type=uint32(TransactionType.INCOMING_TX.value),
                        name=cancellation_addition.name(),
                        memos=[],
                    )
                )
        # Aggregate spend bundles to the first tx
        if len(all_txs) > 0:
            all_txs[0] = dataclasses.replace(all_txs[0], spend_bundle=SpendBundle.aggregate(bundles))
        if secure:
            for tx in all_txs:
                await self.wallet_state_manager.add_pending_transaction(
                    tx_record=dataclasses.replace(tx, fee_amount=fee)
                )
        else:
            self.wallet_state_manager.state_changed("offer_cancelled")
        for trade in trades:
            if secure:
                await self.trade_store.set_status(trade.trade_id, TradeStatus.PENDING_CANCEL)
            else:
                await self.trade_store.set_status(trade.trade_id, TradeStatus.CANCELLED)
        return all_txs
async def save_trade(self, trade: TradeRecord) -> None:
await self.trade_store.add_trade_record(trade)
self.wallet_state_manager.state_changed("offer_added")
async def create_offer_for_ids(
self,
offer: Dict[Union[int, bytes32], int],
driver_dict: Optional[Dict[bytes32, PuzzleInfo]] = None,
solver: Optional[Solver] = None,
fee: uint64 = uint64(0),
validate_only: bool = False,
min_coin_amount: Optional[uint64] = None,
) -> Union[Tuple[Literal[True], TradeRecord, None], Tuple[Literal[False], None, str]]:
if driver_dict is None:
driver_dict = {}
if solver is None:
solver = Solver({})
result = await self._create_offer_for_ids(offer, driver_dict, solver, fee=fee, min_coin_amount=min_coin_amount)
if not result[0] or result[1] is None:
raise Exception(f"Error creating offer: {result[2]}")
success, created_offer, error = result
now = uint64(int(time.time()))
trade_offer: TradeRecord = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=None,
created_at_time=now,
is_my_offer=True,
sent=uint32(0),
offer=bytes(created_offer),
taken_offer=None,
coins_of_interest=created_offer.get_involved_coins(),
trade_id=created_offer.name(),
status=uint32(TradeStatus.PENDING_ACCEPT.value),
sent_to=[],
)
if success is True and trade_offer is not None and not validate_only:
await self.save_trade(trade_offer)
return success, trade_offer, error
async def _create_offer_for_ids(
    self,
    offer_dict: Dict[Union[int, bytes32], int],
    driver_dict: Optional[Dict[bytes32, PuzzleInfo]] = None,
    solver: Optional[Solver] = None,
    fee: uint64 = uint64(0),
    min_coin_amount: Optional[uint64] = None,
) -> Union[Tuple[Literal[True], Offer, None], Tuple[Literal[False], None, str]]:
    """
    Build an Offer from a mapping of wallet id (int) or asset id (bytes32) -> amount.

    Positive amounts are what we request; negative amounts are what we offer.
    Returns (True, offer, None) on success or (False, None, error_message) on failure.
    """
    if driver_dict is None:
        driver_dict = {}
    if solver is None:
        solver = Solver({})
    try:
        coins_to_offer: Dict[Union[int, bytes32], List[Coin]] = {}
        requested_payments: Dict[Optional[bytes32], List[Payment]] = {}
        offer_dict_no_ints: Dict[Optional[bytes32], int] = {}
        for id, amount in offer_dict.items():
            asset_id: Optional[bytes32] = None
            # asset_id can either be none if asset is XFX or
            # bytes32 if another asset (e.g. NFT, CAT)
            if amount > 0:
                # this is what we are receiving in the trade
                memos: List[bytes] = []
                if isinstance(id, int):
                    wallet_id = uint32(id)
                    wallet = self.wallet_state_manager.wallets[wallet_id]
                    p2_ph: bytes32 = await wallet.get_new_puzzlehash()
                    if wallet.type() != WalletType.STANDARD_WALLET:
                        if callable(getattr(wallet, "get_asset_id", None)):  # ATTENTION: new wallets
                            asset_id = bytes32(bytes.fromhex(wallet.get_asset_id()))
                            memos = [p2_ph]
                        else:
                            raise ValueError(
                                f"Cannot request assets from wallet id {wallet.id()} without more information"
                            )
                else:
                    # Caller passed an asset id directly; receive to the main wallet.
                    p2_ph = await self.wallet_state_manager.main_wallet.get_new_puzzlehash()
                    asset_id = id
                    wallet = await self.wallet_state_manager.get_wallet_for_asset_id(asset_id.hex())
                    memos = [p2_ph]
                requested_payments[asset_id] = [Payment(p2_ph, uint64(amount), memos)]
            elif amount < 0:
                # this is what we are sending in the trade
                if isinstance(id, int):
                    wallet_id = uint32(id)
                    wallet = self.wallet_state_manager.wallets[wallet_id]
                    if wallet.type() != WalletType.STANDARD_WALLET:
                        if callable(getattr(wallet, "get_asset_id", None)):  # ATTENTION: new wallets
                            asset_id = bytes32(bytes.fromhex(wallet.get_asset_id()))
                        else:
                            raise ValueError(
                                f"Cannot offer assets from wallet id {wallet.id()} without more information"
                            )
                else:
                    asset_id = id
                    wallet = await self.wallet_state_manager.get_wallet_for_asset_id(asset_id.hex())
                if not callable(getattr(wallet, "get_coins_to_offer", None)):  # ATTENTION: new wallets
                    raise ValueError(f"Cannot offer coins from wallet id {wallet.id()}")
                coins_to_offer[id] = await wallet.get_coins_to_offer(asset_id, uint64(abs(amount)), min_coin_amount)
                # Note: if we use check_for_special_offer_making, this is not used.
            elif amount == 0:
                raise ValueError("You cannot offer nor request 0 amount of something")
            offer_dict_no_ints[asset_id] = amount
            if asset_id is not None and wallet is not None:  # if this asset is not XFX
                # Record (and validate) the puzzle driver for this asset.
                if callable(getattr(wallet, "get_puzzle_info", None)):
                    puzzle_driver: PuzzleInfo = await wallet.get_puzzle_info(asset_id)
                    if asset_id in driver_dict and driver_dict[asset_id] != puzzle_driver:
                        # ignore the case if we're an nft transferring the did owner
                        if self.check_for_owner_change_in_drivers(puzzle_driver, driver_dict[asset_id]):
                            driver_dict[asset_id] = puzzle_driver
                        else:
                            raise ValueError(
                                f"driver_dict specified {driver_dict[asset_id]}, was expecting {puzzle_driver}"
                            )
                    else:
                        driver_dict[asset_id] = puzzle_driver
                else:
                    raise ValueError(f"Wallet for asset id {asset_id} is not properly integrated with TradeManager")

        # Some asset combinations (e.g. NFTs) are handled by a dedicated offer builder.
        potential_special_offer: Optional[Offer] = await self.check_for_special_offer_making(
            offer_dict_no_ints, driver_dict, solver, fee, min_coin_amount
        )
        if potential_special_offer is not None:
            return True, potential_special_offer, None

        all_coins: List[Coin] = [c for coins in coins_to_offer.values() for c in coins]
        notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = Offer.notarize_payments(
            requested_payments, all_coins
        )
        announcements_to_assert = Offer.calculate_announcements(notarized_payments, driver_dict)

        all_transactions: List[TransactionRecord] = []
        fee_left_to_pay: uint64 = fee
        for id, selected_coins in coins_to_offer.items():
            if isinstance(id, int):
                wallet = self.wallet_state_manager.wallets[id]
            else:
                wallet = await self.wallet_state_manager.get_wallet_for_asset_id(id.hex())
            # This should probably not switch on whether or not we're spending XFX but it has to for now
            if wallet.type() == WalletType.STANDARD_WALLET:
                tx = await wallet.generate_signed_transaction(
                    abs(offer_dict[id]),
                    Offer.ph(),
                    fee=fee_left_to_pay,
                    coins=set(selected_coins),
                    puzzle_announcements_to_consume=announcements_to_assert,
                )
                all_transactions.append(tx)
            elif wallet.type() == WalletType.NFT:
                # This is to generate the tx for specific nft assets, i.e. not using
                # wallet_id as the selector which would select any coins from nft_wallet
                amounts = [coin.amount for coin in selected_coins]
                txs = await wallet.generate_signed_transaction(
                    # [abs(offer_dict[id])],
                    amounts,
                    [Offer.ph()],
                    fee=fee_left_to_pay,
                    coins=set(selected_coins),
                    puzzle_announcements_to_consume=announcements_to_assert,
                )
                all_transactions.extend(txs)
            else:
                # ATTENTION: new_wallets
                txs = await wallet.generate_signed_transaction(
                    [abs(offer_dict[id])],
                    [Offer.ph()],
                    fee=fee_left_to_pay,
                    coins=set(selected_coins),
                    puzzle_announcements_to_consume=announcements_to_assert,
                )
                all_transactions.extend(txs)
            # Only the first wallet's transaction pays the fee.
            fee_left_to_pay = uint64(0)

        total_spend_bundle = SpendBundle.aggregate(
            [x.spend_bundle for x in all_transactions if x.spend_bundle is not None]
        )
        offer = Offer(notarized_payments, total_spend_bundle, driver_dict)
        return True, offer, None
    except Exception as e:
        tb = traceback.format_exc()
        self.log.error(f"Error with creating trade offer: {type(e)}{tb}")
        return False, None, str(e)
async def maybe_create_wallets_for_offer(self, offer: Offer) -> None:
    """Ensure a wallet exists for every non-XFX asset referenced by *offer*.

    For each asset key in the offer's arbitrage summary (a None key stands for
    the native XFX asset and needs no extra wallet), look up a wallet matching
    the asset's puzzle driver and create one when none exists.
    """
    # Fix: this reference was re-assigned on every loop iteration; it is
    # loop-invariant, so hoist it out of the loop.
    wsm = self.wallet_state_manager
    for key in offer.arbitrage():
        if key is None:
            continue
        # ATTENTION: new_wallets
        exists: Optional[Wallet] = await wsm.get_wallet_for_puzzle_info(offer.driver_dict[key])
        if exists is None:
            await wsm.create_wallet_for_puzzle_info(offer.driver_dict[key])
async def check_offer_validity(self, offer: Offer, peer: WSFlaxConnection) -> bool:
    """Return True when every non-ephemeral coin spent by *offer* is still unspent on chain."""
    removals: List[Coin] = offer.bundle.removals()
    removal_names: List[bytes32] = [coin.name() for coin in removals]
    # A removal whose parent is itself being removed was created inside the
    # bundle (ephemeral) and cannot be looked up on chain.
    non_ephemeral: List[Coin] = [coin for coin in removals if coin.parent_coin_info not in removal_names]
    coin_states = await self.wallet_state_manager.wallet_node.get_coin_state(
        [coin.name() for coin in non_ephemeral], peer=peer
    )
    if len(coin_states) != len(non_ephemeral):
        return False
    return all(cs.spent_height is None for cs in coin_states)
async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> List[TransactionRecord]:
if validate:
final_spend_bundle: SpendBundle = offer.to_valid_spend()
else:
final_spend_bundle = offer.bundle
settlement_coins: List[Coin] = [c for coins in offer.get_offered_coins().values() for c in coins]
settlement_coin_ids: List[bytes32] = [c.name() for c in settlement_coins]
additions: List[Coin] = final_spend_bundle.not_ephemeral_additions()
removals: List[Coin] = final_spend_bundle.removals()
all_fees = uint64(final_spend_bundle.fees())
txs = []
addition_dict: Dict[uint32, List[Coin]] = {}
for addition in additions:
wallet_info = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(addition.puzzle_hash)
if wallet_info is not None:
wallet_id, _ = wallet_info
if addition.parent_coin_info in settlement_coin_ids:
wallet = self.wallet_state_manager.wallets[wallet_id]
to_puzzle_hash = await wallet.convert_puzzle_hash(addition.puzzle_hash) # ATTENTION: new wallets
txs.append(
TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=to_puzzle_hash,
amount=uint64(addition.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=[addition],
removals=[],
wallet_id=wallet_id,
sent_to=[],
trade_id=offer.name(),
type=uint32(TransactionType.INCOMING_TRADE.value),
name=std_hash(final_spend_bundle.name() + addition.name()),
memos=[],
)
)
else: # This is change
addition_dict.setdefault(wallet_id, [])
addition_dict[wallet_id].append(addition)
# While we want additions to show up as separate records, removals of the same wallet should show as one
removal_dict: Dict[uint32, List[Coin]] = {}
for removal in removals:
wallet_info = await self.wallet_state_manager.get_wallet_id_for_puzzle_hash(removal.puzzle_hash)
if wallet_info is not None:
wallet_id, _ = wallet_info
removal_dict.setdefault(wallet_id, [])
removal_dict[wallet_id].append(removal)
all_removals: List[bytes32] = [r.name() for removals in removal_dict.values() for r in removals]
for wid, grouped_removals in removal_dict.items():
wallet = self.wallet_state_manager.wallets[wid]
to_puzzle_hash = bytes32([1] * 32) # We use all zeros to be clear not to send here
removal_tree_hash = Program.to([coin_as_list(rem) for rem in grouped_removals]).get_tree_hash()
# We also need to calculate the sent amount
removed: int = sum(c.amount for c in grouped_removals)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/derivation_record.py | flax/wallet/derivation_record.py | from __future__ import annotations
from dataclasses import dataclass
from blspy import G1Element
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint32
from flax.wallet.util.wallet_types import WalletType
@dataclass(frozen=True)
class DerivationRecord:
    """
    These are records representing a puzzle hash, which is generated from a
    public key, derivation index, and wallet type. Stored in the puzzle_store.
    """

    # Derivation index at which this record was generated.
    index: uint32
    # Puzzle hash derived for `pubkey`.
    puzzle_hash: bytes32
    # Public key this puzzle hash belongs to.
    pubkey: G1Element
    # Type of the wallet this record is for.
    wallet_type: WalletType
    # Identifier of the owning wallet.
    wallet_id: uint32
    # True when hardened key derivation was used for this index.
    hardened: bool
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_action.py | flax/wallet/wallet_action.py | from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
from flax.util.ints import uint32
from flax.wallet.util.wallet_types import WalletType
@dataclass(frozen=True)
class WalletAction:
    """
    This object represents a wallet action as it is stored in the database.

    Purpose:
    Some wallets require the wallet node to perform a certain action when an event happens.
    For example, the CAT wallet needs to fetch solutions once it receives a coin.
    In order to be safe from losing connection, closing the app, etc., those actions need to be persisted.

    id: auto-incremented for every added action
    name: specified by the wallet
    wallet_id: id of the wallet that created this action
    type: type of the wallet that created this action
    wallet_callback: name of the callback function in the wallet that created this action; if specified it will
    get called when the action has been performed.
    done: indicates whether the action has been performed
    data: JSON-encoded string containing any data the wallet or a wallet_node needs for this specific action.
    """

    id: uint32
    name: str
    wallet_id: int
    type: WalletType
    wallet_callback: Optional[str]
    done: bool
    data: str
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/secret_key_store.py | flax/wallet/secret_key_store.py | from __future__ import annotations
from typing import Dict, Optional
from blspy import G1Element, PrivateKey
GROUP_ORDER = 0x73EDA753299D7D483339D80809A1D80553BDA402FFFE5BFEFFFFFFFF00000001
class SecretKeyStore:
    """In-memory map from serialized public keys to their private keys."""

    # Keyed by `bytes(public_key)` (the serialized G1Element), not the
    # G1Element object itself — see save_secret_key below.
    _pk2sk: Dict[bytes, PrivateKey]

    def __init__(self) -> None:
        self._pk2sk = {}

    def save_secret_key(self, secret_key: PrivateKey) -> None:
        """Index *secret_key* under the serialized bytes of its public key."""
        public_key = secret_key.get_g1()
        self._pk2sk[bytes(public_key)] = secret_key

    def secret_key_for_public_key(self, public_key: G1Element) -> Optional[PrivateKey]:
        """Return the stored private key for *public_key*, or None if unknown."""
        return self._pk2sk.get(bytes(public_key))
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/uncurried_puzzle.py | flax/wallet/uncurried_puzzle.py | from __future__ import annotations
from dataclasses import dataclass
from flax.types.blockchain_format.program import Program
@dataclass(frozen=True)
class UncurriedPuzzle:
    """The result of uncurrying a puzzle: the template program and its curried arguments."""

    # The uncurried template (mod) program.
    mod: Program
    # The arguments that were curried into `mod`.
    args: Program
def uncurry_puzzle(puzzle: Program) -> UncurriedPuzzle:
    """Split *puzzle* into its uncurried template and curried arguments."""
    mod, args = puzzle.uncurry()
    return UncurriedPuzzle(mod, args)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/payment.py | flax/wallet/payment.py | from dataclasses import dataclass
from typing import List
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.program import Program
from flax.util.ints import uint64
# This class is supposed to correspond to a CREATE_COIN condition
@dataclass(frozen=True)
class Payment:
    """A single payment: the arguments of one CREATE_COIN (opcode 51) condition."""

    # Destination puzzle hash of the created coin.
    puzzle_hash: bytes32
    # Amount of the created coin.
    amount: uint64
    # Memos attached as the condition's third argument (may be empty).
    memos: List[bytes]

    def as_condition_args(self) -> List:
        """Return the CREATE_COIN argument list [puzzle_hash, amount, memos]."""
        return [self.puzzle_hash, self.amount, self.memos]

    def as_condition(self) -> Program:
        """Return the full CREATE_COIN condition (opcode 51 prepended) as a Program."""
        return Program.to([51, *self.as_condition_args()])

    def name(self) -> bytes32:
        """Return the tree hash of the condition, used as this payment's identifier."""
        return self.as_condition().get_tree_hash()

    @classmethod
    def from_condition(cls, condition: Program) -> "Payment":
        """Parse a CREATE_COIN condition back into a Payment; memos are optional."""
        python_condition: List = condition.as_python()
        puzzle_hash, amount = python_condition[1:3]
        memos: List[bytes] = []
        # The memo list is only present when the condition has a fourth element.
        if len(python_condition) > 3:
            memos = python_condition[3]
        return cls(bytes32(puzzle_hash), uint64(int.from_bytes(amount, "big")), memos)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet.py | flax/wallet/wallet.py | from __future__ import annotations
import logging
import time
from typing import Any, Dict, List, Optional, Set, TYPE_CHECKING, Tuple
from blspy import G1Element, G2Element, AugSchemeMPL
from flax.consensus.cost_calculator import NPCResult
from flax.full_node.bundle_tools import simple_solution_generator
from flax.full_node.mempool_check_conditions import get_name_puzzle_conditions
from flax.types.announcement import Announcement
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program, SerializedProgram
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_spend import CoinSpend
from flax.types.generator_types import BlockGenerator
from flax.types.spend_bundle import SpendBundle
from flax.util.hash import std_hash
from flax.util.ints import uint8, uint32, uint64, uint128
from flax.wallet.coin_selection import select_coins
from flax.wallet.derivation_record import DerivationRecord
from flax.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
puzzle_for_pk,
puzzle_hash_for_pk,
solution_for_conditions,
)
from flax.wallet.puzzles.puzzle_utils import (
make_assert_absolute_seconds_exceeds_condition,
make_assert_coin_announcement,
make_assert_my_coin_id_condition,
make_assert_puzzle_announcement,
make_create_coin_announcement,
make_create_coin_condition,
make_create_puzzle_announcement,
make_reserve_fee_condition,
)
from flax.wallet.secret_key_store import SecretKeyStore
from flax.wallet.sign_coin_spends import sign_coin_spends
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.compute_memos import compute_memos
from flax.wallet.util.transaction_type import TransactionType
from flax.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType
from flax.wallet.wallet_coin_record import WalletCoinRecord
from flax.wallet.wallet_info import WalletInfo
if TYPE_CHECKING:
from flax.server.ws_connection import WSFlaxConnection
class Wallet:
    """The standard (XFX) wallet: selects coins, builds and signs plain coin spends."""

    # Back-reference to the WalletStateManager that owns this wallet.
    wallet_state_manager: Any
    log: logging.Logger
    # Identifier of this wallet within the wallet state manager.
    wallet_id: uint32
    # In-memory cache of synthetic secret keys used when signing coin spends.
    secret_key_store: SecretKeyStore
    # Lazily measured CLVM cost of one standard transaction (see get_max_send_amount).
    cost_of_single_tx: Optional[int]
@staticmethod
async def create(
    wallet_state_manager: Any,
    info: WalletInfo,
    name: str = None,
):
    """Async constructor: build a Wallet bound to *wallet_state_manager* and *info*."""
    wallet = Wallet()
    wallet.log = logging.getLogger(name or __name__)
    wallet.wallet_state_manager = wallet_state_manager
    wallet.wallet_id = info.id
    wallet.secret_key_store = SecretKeyStore()
    wallet.cost_of_single_tx = None
    return wallet
async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
    """Return the largest amount this wallet can send in one transaction.

    Measures the CLVM cost of a single spend once (cached in
    self.cost_of_single_tx), then greedily sums the largest spendable coins
    until the per-transaction cost budget would be exceeded.
    """
    spendable: List[WalletCoinRecord] = list(
        await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
    )
    if len(spendable) == 0:
        return uint128(0)
    # Largest coins first so we need the fewest spends for a given amount.
    spendable.sort(reverse=True, key=lambda record: record.coin.amount)
    if self.cost_of_single_tx is None:
        # Probe the cost by building (but not sending) a one-coin transaction.
        coin = spendable[0].coin
        tx = await self.generate_signed_transaction(
            uint64(coin.amount), coin.puzzle_hash, coins={coin}, ignore_max_send_amount=True
        )
        assert tx.spend_bundle is not None
        program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
        # npc contains names of the coins removed, puzzle_hashes and their spend conditions
        result: NPCResult = get_name_puzzle_conditions(
            program,
            self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
            cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
            mempool_mode=True,
        )
        self.cost_of_single_tx = result.cost
        self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")

    max_cost = self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 5  # avoid full block TXs
    current_cost = 0
    total_amount = 0
    total_coin_count = 0
    for record in spendable:
        current_cost += self.cost_of_single_tx
        total_amount += record.coin.amount
        total_coin_count += 1
        # Stop once adding one more coin would push us over the budget.
        if current_cost + self.cost_of_single_tx > max_cost:
            break
    return uint128(total_amount)
@classmethod
def type(cls) -> uint8:
    """Return the numeric wallet type: the standard wallet."""
    standard = WalletType.STANDARD_WALLET
    return uint8(standard)
def id(self) -> uint32:
    """Return this wallet's identifier within the wallet state manager."""
    return self.wallet_id
async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
    """Confirmed balance of this wallet, optionally restricted to *record_list*."""
    wsm = self.wallet_state_manager
    return await wsm.get_confirmed_balance_for_wallet(self.id(), record_list)
async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
    """Unconfirmed balance of this wallet, optionally restricted to *unspent_records*."""
    wsm = self.wallet_state_manager
    return await wsm.get_unconfirmed_balance(self.id(), unspent_records)
async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
    """Confirmed balance that is currently spendable (not locked by pending spends)."""
    return await self.wallet_state_manager.get_confirmed_spendable_balance_for_wallet(
        self.id(), unspent_records
    )
async def get_pending_change_balance(self) -> uint64:
    """Return the amount of change we expect back from our own unconfirmed spends.

    Considers only unconfirmed transactions that are in the mempool and spend
    at least one of this wallet's coins, then sums the additions that come
    back to this wallet.
    """
    unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        self.id()
    )
    addition_amount = 0

    for record in unconfirmed_tx:
        if not record.is_in_mempool():
            # A record that carries a spend bundle should normally be in the mempool.
            if record.spend_bundle is not None:
                self.log.warning(f"Record: {record} not in mempool, {record.sent_to}")
            continue
        our_spend = False
        for coin in record.removals:
            # Only count transactions that spend at least one of our coins.
            if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                our_spend = True
                break

        if our_spend is not True:
            continue

        for coin in record.additions:
            # Outputs returned to this wallet are the pending change.
            if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                addition_amount += coin.amount

    return uint64(addition_amount)
def require_derivation_paths(self) -> bool:
    """The standard wallet always uses derivation paths."""
    return True
def puzzle_for_pk(self, pubkey: G1Element) -> Program:
    """Return the standard p2 puzzle for *pubkey* (delegates to the module-level helper)."""
    program = puzzle_for_pk(pubkey)
    return program
def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
    """Return the standard p2 puzzle hash for *pubkey* (delegates to the module-level helper)."""
    ph = puzzle_hash_for_pk(pubkey)
    return ph
async def convert_puzzle_hash(self, puzzle_hash: bytes32) -> bytes32:
    """Identity for the standard wallet; other wallet types do real inner/outer conversion."""
    return puzzle_hash
async def hack_populate_secret_key_for_puzzle_hash(self, puzzle_hash: bytes32) -> G1Element:
    """Look up the key pair for *puzzle_hash*, cache its synthetic secret key, return the public key.

    Raises ValueError when the wallet has no keys for the puzzle hash.
    """
    keys = await self.wallet_state_manager.get_keys(puzzle_hash)
    if keys is None:
        error_msg = f"Wallet couldn't find keys for puzzle_hash {puzzle_hash}"
        self.log.error(error_msg)
        raise ValueError(error_msg)
    public_key, secret_key = keys
    # HACK: stash the synthetic key so later signing can look it up by public key.
    synthetic = calculate_synthetic_secret_key(secret_key, DEFAULT_HIDDEN_PUZZLE_HASH)
    self.secret_key_store.save_secret_key(synthetic)
    return public_key
async def hack_populate_secret_keys_for_coin_spends(self, coin_spends: List[CoinSpend]) -> None:
    """
    This hack forces secret keys into the `_pk2sk` lookup. This should eventually be replaced
    by a persistent DB table that can do this look-up directly.
    """
    for spend in coin_spends:
        await self.hack_populate_secret_key_for_puzzle_hash(spend.coin.puzzle_hash)
async def puzzle_for_puzzle_hash(self, puzzle_hash: bytes32) -> Program:
    """Return the full p2 puzzle for *puzzle_hash*, caching its signing key as a side effect."""
    pubkey = await self.hack_populate_secret_key_for_puzzle_hash(puzzle_hash)
    return puzzle_for_pk(pubkey)
async def get_new_puzzle(self) -> Program:
    """Derive a fresh puzzle from the next unused derivation record and cache its key."""
    record = await self.wallet_state_manager.get_unused_derivation_record(self.id())
    puzzle = puzzle_for_pk(record.pubkey)
    await self.hack_populate_secret_key_for_puzzle_hash(puzzle.get_tree_hash())
    return puzzle
async def get_puzzle_hash(self, new: bool) -> bytes32:
    """Return a receive puzzle hash: a fresh one when *new*, otherwise the current one.

    Falls back to a fresh puzzle hash when no current derivation record exists.
    """
    if new:
        return await self.get_new_puzzlehash()
    record: Optional[DerivationRecord] = await self.wallet_state_manager.get_current_derivation_record_for_wallet(
        self.id()
    )
    if record is None:
        return await self.get_new_puzzlehash()
    return record.puzzle_hash
async def get_new_puzzlehash(self) -> bytes32:
    """Return the next unused derivation record's puzzle hash and cache its signing key."""
    record = await self.wallet_state_manager.get_unused_derivation_record(self.id())
    await self.hack_populate_secret_key_for_puzzle_hash(record.puzzle_hash)
    return record.puzzle_hash
def make_solution(
    self,
    primaries: List[AmountWithPuzzlehash],
    min_time=0,
    me=None,
    coin_announcements: Optional[Set[bytes]] = None,
    coin_announcements_to_assert: Optional[Set[bytes32]] = None,
    puzzle_announcements: Optional[Set[bytes]] = None,
    puzzle_announcements_to_assert: Optional[Set[bytes32]] = None,
    fee=0,
) -> Program:
    """Build a standard-puzzle solution from the given conditions.

    primaries: CREATE_COIN outputs ({"puzzlehash", "amount", "memos"} dicts).
    min_time: adds an absolute-seconds-exceeds assertion when > 0.
    me: when truthy, asserts this coin's id (expects a dict with an "id" key).
    coin_announcements / puzzle_announcements: announcements this spend creates.
    *_to_assert: announcement hashes this spend asserts.
    fee: adds a RESERVE_FEE condition when non-zero; must not be negative.
    """
    assert fee >= 0
    condition_list = []
    if len(primaries) > 0:
        for primary in primaries:
            if "memos" in primary:
                memos: Optional[List[bytes]] = primary["memos"]
                # An empty memo list is normalized to "no memos".
                if memos is not None and len(memos) == 0:
                    memos = None
            else:
                memos = None
            condition_list.append(make_create_coin_condition(primary["puzzlehash"], primary["amount"], memos))
    if min_time > 0:
        condition_list.append(make_assert_absolute_seconds_exceeds_condition(min_time))
    if me:
        condition_list.append(make_assert_my_coin_id_condition(me["id"]))
    if fee:
        condition_list.append(make_reserve_fee_condition(fee))
    if coin_announcements:
        for announcement in coin_announcements:
            condition_list.append(make_create_coin_announcement(announcement))
    if coin_announcements_to_assert:
        for announcement_hash in coin_announcements_to_assert:
            condition_list.append(make_assert_coin_announcement(announcement_hash))
    if puzzle_announcements:
        for announcement in puzzle_announcements:
            condition_list.append(make_create_puzzle_announcement(announcement))
    if puzzle_announcements_to_assert:
        for announcement_hash in puzzle_announcements_to_assert:
            condition_list.append(make_assert_puzzle_announcement(announcement_hash))
    return solution_for_conditions(condition_list)
def add_condition_to_solution(self, condition: Program, solution: Program) -> Program:
    """Return a new Program equal to *solution* with *condition* appended to its condition list."""
    as_python = solution.as_python()
    condition_list = as_python[1]
    condition_list.append(condition)
    return Program.to(as_python)
async def select_coins(
    self,
    amount: uint64,
    exclude: Optional[List[Coin]] = None,
    min_coin_amount: Optional[uint64] = None,
    max_coin_amount: Optional[uint64] = None,
) -> Set[Coin]:
    """
    Returns a set of coins that can be used for generating a new transaction.
    Note: Must be called under wallet state manager lock
    """
    spendable_amount: uint128 = await self.get_spendable_balance()
    spendable_coins: List[WalletCoinRecord] = list(
        await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id())
    )

    # Try to use coins from the store, if there isn't enough of "unused"
    # coins use change coins that are not confirmed yet
    unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
        self.id()
    )
    if max_coin_amount is None:
        # Default cap: the consensus maximum coin amount.
        max_coin_amount = uint64(self.wallet_state_manager.constants.MAX_COIN_AMOUNT)
    coins = await select_coins(
        spendable_amount,
        max_coin_amount,
        spendable_coins,
        unconfirmed_removals,
        self.log,
        uint128(amount),
        exclude,
        min_coin_amount,
    )
    # The selection algorithm must always cover the requested amount.
    assert sum(c.amount for c in coins) >= amount
    return coins
async def _generate_unsigned_transaction(
    self,
    amount: uint64,
    newpuzzlehash: bytes32,
    fee: uint64 = uint64(0),
    origin_id: bytes32 = None,
    coins: Set[Coin] = None,
    primaries_input: Optional[List[AmountWithPuzzlehash]] = None,
    ignore_max_send_amount: bool = False,
    coin_announcements_to_consume: Set[Announcement] = None,
    puzzle_announcements_to_consume: Set[Announcement] = None,
    memos: Optional[List[bytes]] = None,
    negative_change_allowed: bool = False,
    min_coin_amount: Optional[uint64] = None,
    exclude_coins: Optional[Set[Coin]] = None,
) -> List[CoinSpend]:
    """
    Generates a unsigned transaction in form of List(Puzzle, Solutions)
    Note: this must be called under a wallet state manager lock

    One "origin" coin carries all CREATE_COIN outputs (including change) and
    announces a message; every other selected coin merely asserts that
    announcement so the spends stand or fall together.
    """
    if primaries_input is None:
        primaries: Optional[List[AmountWithPuzzlehash]] = None
        total_amount = amount + fee
    else:
        primaries = primaries_input.copy()
        primaries_amount = 0
        for prim in primaries:
            primaries_amount += prim["amount"]
        total_amount = amount + fee + primaries_amount

    if not ignore_max_send_amount:
        max_send = await self.get_max_send_amount()
        if total_amount > max_send:
            raise ValueError(f"Can't send more than {max_send} in a single transaction")
        self.log.debug("Got back max send amount: %s", max_send)
    if coins is None:
        # Caller did not pin coins; select them (optionally excluding some).
        exclude_coins_list: Optional[List[Coin]] = None
        if exclude_coins is not None:
            exclude_coins_list = list(exclude_coins)
        coins = await self.select_coins(
            uint64(total_amount), min_coin_amount=min_coin_amount, exclude=exclude_coins_list
        )
    elif exclude_coins is not None:
        raise ValueError("Can't exclude coins when also specifically including coins")
    assert len(coins) > 0
    self.log.info(f"coins is not None {coins}")
    spend_value = sum([coin.amount for coin in coins])
    change = spend_value - total_amount
    if negative_change_allowed:
        change = max(0, change)
    assert change >= 0
    if coin_announcements_to_consume is not None:
        coin_announcements_bytes: Optional[Set[bytes32]] = {a.name() for a in coin_announcements_to_consume}
    else:
        coin_announcements_bytes = None
    if puzzle_announcements_to_consume is not None:
        puzzle_announcements_bytes: Optional[Set[bytes32]] = {a.name() for a in puzzle_announcements_to_consume}
    else:
        puzzle_announcements_bytes = None

    spends: List[CoinSpend] = []
    primary_announcement_hash: Optional[bytes32] = None

    # Check for duplicates
    if primaries is not None:
        all_primaries_list = [(p["puzzlehash"], p["amount"]) for p in primaries] + [(newpuzzlehash, amount)]
        if len(set(all_primaries_list)) != len(all_primaries_list):
            raise ValueError("Cannot create two identical coins")
    if memos is None:
        memos = []
    assert memos is not None
    for coin in coins:
        # Only one coin creates outputs
        if origin_id in (None, coin.name()):
            origin_id = coin.name()
            if primaries is None:
                if amount > 0:
                    primaries = [{"puzzlehash": newpuzzlehash, "amount": uint64(amount), "memos": memos}]
                else:
                    primaries = []
            else:
                primaries.append({"puzzlehash": newpuzzlehash, "amount": uint64(amount), "memos": memos})
            if change > 0:
                change_puzzle_hash: bytes32 = await self.get_new_puzzlehash()
                primaries.append({"puzzlehash": change_puzzle_hash, "amount": uint64(change), "memos": []})
            # The announced message commits to all inputs and all outputs.
            message_list: List[bytes32] = [c.name() for c in coins]
            for primary in primaries:
                message_list.append(Coin(coin.name(), primary["puzzlehash"], primary["amount"]).name())
            message: bytes32 = std_hash(b"".join(message_list))
            puzzle: Program = await self.puzzle_for_puzzle_hash(coin.puzzle_hash)
            solution: Program = self.make_solution(
                primaries=primaries,
                fee=fee,
                coin_announcements={message},
                coin_announcements_to_assert=coin_announcements_bytes,
                puzzle_announcements_to_assert=puzzle_announcements_bytes,
            )
            primary_announcement_hash = Announcement(coin.name(), message).name()
            spends.append(
                CoinSpend(
                    coin, SerializedProgram.from_bytes(bytes(puzzle)), SerializedProgram.from_bytes(bytes(solution))
                )
            )
            break
    else:
        # for/else: the loop never hit break, so no coin matched origin_id.
        raise ValueError("origin_id is not in the set of selected coins")

    # Process the non-origin coins now that we have the primary announcement hash
    for coin in coins:
        if coin.name() == origin_id:
            continue
        puzzle = await self.puzzle_for_puzzle_hash(coin.puzzle_hash)
        solution = self.make_solution(primaries=[], coin_announcements_to_assert={primary_announcement_hash})
        spends.append(
            CoinSpend(
                coin, SerializedProgram.from_bytes(bytes(puzzle)), SerializedProgram.from_bytes(bytes(solution))
            )
        )

    self.log.debug(f"Spends is {spends}")
    return spends
async def sign_transaction(self, coin_spends: List[CoinSpend]) -> SpendBundle:
    """Sign *coin_spends* with the cached synthetic keys and return the aggregated bundle."""
    constants = self.wallet_state_manager.constants
    return await sign_coin_spends(
        coin_spends,
        self.secret_key_store.secret_key_for_public_key,
        constants.AGG_SIG_ME_ADDITIONAL_DATA,
        constants.MAX_BLOCK_COST_CLVM,
    )
async def sign_message(self, message: str, puzzle_hash: bytes32) -> Tuple[G1Element, G2Element]:
    """Sign the tree hash of ("Flax Signed Message", message) with the synthetic key for *puzzle_hash*."""
    pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
    synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
    to_sign: Program = Program.to(("Flax Signed Message", message))
    signature = AugSchemeMPL.sign(synthetic_secret_key, to_sign.get_tree_hash())
    return synthetic_secret_key.get_g1(), signature
async def generate_signed_transaction(
    self,
    amount: uint64,
    puzzle_hash: bytes32,
    fee: uint64 = uint64(0),
    origin_id: bytes32 = None,
    coins: Set[Coin] = None,
    primaries: Optional[List[AmountWithPuzzlehash]] = None,
    ignore_max_send_amount: bool = False,
    coin_announcements_to_consume: Set[Announcement] = None,
    puzzle_announcements_to_consume: Set[Announcement] = None,
    memos: Optional[List[bytes]] = None,
    negative_change_allowed: bool = False,
    min_coin_amount: Optional[uint64] = None,
    exclude_coins: Optional[Set[Coin]] = None,
) -> TransactionRecord:
    """
    Use this to generate transaction.
    Note: this must be called under a wallet state manager lock
    The first output is (amount, puzzle_hash, memos), and the rest of the outputs are in primaries.
    """
    # The record's reported amount: the main output plus any extra primaries.
    if primaries is None:
        non_change_amount = amount
    else:
        non_change_amount = uint64(amount + sum(p["amount"] for p in primaries))

    self.log.debug("Generating transaction for: %s %s %s", puzzle_hash, amount, repr(coins))
    transaction = await self._generate_unsigned_transaction(
        amount,
        puzzle_hash,
        fee,
        origin_id,
        coins,
        primaries,
        ignore_max_send_amount,
        coin_announcements_to_consume,
        puzzle_announcements_to_consume,
        memos,
        negative_change_allowed,
        min_coin_amount=min_coin_amount,
        exclude_coins=exclude_coins,
    )
    assert len(transaction) > 0
    self.log.info("About to sign a transaction: %s", transaction)
    await self.hack_populate_secret_keys_for_coin_spends(transaction)
    spend_bundle: SpendBundle = await sign_coin_spends(
        transaction,
        self.secret_key_store.secret_key_for_public_key,
        self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA,
        self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
    )

    now = uint64(int(time.time()))
    add_list: List[Coin] = list(spend_bundle.additions())
    rem_list: List[Coin] = list(spend_bundle.removals())

    # Value-conservation sanity check: outputs plus fee must match inputs
    # exactly, unless negative change is allowed (outputs may then exceed inputs).
    output_amount = sum(a.amount for a in add_list) + fee
    input_amount = sum(r.amount for r in rem_list)
    if negative_change_allowed:
        assert output_amount >= input_amount
    else:
        assert output_amount == input_amount

    return TransactionRecord(
        confirmed_at_height=uint32(0),
        created_at_time=now,
        to_puzzle_hash=puzzle_hash,
        amount=uint64(non_change_amount),
        fee_amount=uint64(fee),
        confirmed=False,
        sent=uint32(0),
        spend_bundle=spend_bundle,
        additions=add_list,
        removals=rem_list,
        wallet_id=self.id(),
        sent_to=[],
        trade_id=None,
        type=uint32(TransactionType.OUTGOING_TX.value),
        name=spend_bundle.name(),
        memos=list(compute_memos(spend_bundle).items()),
    )
async def push_transaction(self, tx: TransactionRecord) -> None:
"""Use this API to send transactions."""
await self.wallet_state_manager.add_pending_transaction(tx)
await self.wallet_state_manager.wallet_node.update_ui()
# This is to be aggregated together with a CAT offer to ensure that the trade happens
async def create_spend_bundle_relative_flax(self, flax_amount: int, exclude: List[Coin] = []) -> SpendBundle:
list_of_solutions = []
utxos = None
# If we're losing value then get coins with at least that much value
# If we're gaining value then our amount doesn't matter
if flax_amount < 0:
utxos = await self.select_coins(uint64(abs(flax_amount)), exclude)
else:
utxos = await self.select_coins(uint64(0), exclude)
assert len(utxos) > 0
# Calculate output amount given sum of utxos
spend_value = sum([coin.amount for coin in utxos])
flax_amount = spend_value + flax_amount
# Create coin solutions for each utxo
output_created = None
for coin in utxos:
puzzle = await self.puzzle_for_puzzle_hash(coin.puzzle_hash)
if output_created is None:
newpuzhash = await self.get_new_puzzlehash()
primaries: List[AmountWithPuzzlehash] = [
{"puzzlehash": newpuzhash, "amount": uint64(flax_amount), "memos": []}
]
solution = self.make_solution(primaries=primaries)
output_created = coin
list_of_solutions.append(CoinSpend(coin, puzzle, solution))
await self.hack_populate_secret_keys_for_coin_spends(list_of_solutions)
spend_bundle = await sign_coin_spends(
list_of_solutions,
self.secret_key_store.secret_key_for_public_key,
self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA,
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
)
return spend_bundle
async def get_coins_to_offer(
self, asset_id: Optional[bytes32], amount: uint64, min_coin_amount: Optional[uint64] = None
) -> Set[Coin]:
if asset_id is not None:
raise ValueError(f"The standard wallet cannot offer coins with asset id {asset_id}")
balance = await self.get_confirmed_balance()
if balance < amount:
raise Exception(f"insufficient funds in wallet {self.id()}")
return await self.select_coins(amount, min_coin_amount=min_coin_amount)
    # WSFlaxConnection is only imported for type checking
    async def coin_added(
        self, coin: Coin, height: uint32, peer: WSFlaxConnection
    ) -> None:  # pylint: disable=used-before-assignment
        """WalletProtocol hook called when a coin belonging to this wallet is added.

        The standard wallet needs no extra bookkeeping beyond what the wallet
        state manager already does, so this is intentionally a no-op.
        """
        pass
if TYPE_CHECKING:
    from flax.wallet.wallet_protocol import WalletProtocol

    # Static-analysis-only assignment: makes mypy verify that Wallet satisfies the
    # WalletProtocol interface. Never executed at runtime (TYPE_CHECKING is False).
    _dummy: WalletProtocol = Wallet()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_transaction_store.py | flax/wallet/wallet_transaction_store.py | from __future__ import annotations
import dataclasses
import time
from typing import Dict, List, Optional, Tuple
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.util.db_wrapper import DBWrapper2
from flax.util.errors import Err
from flax.util.ints import uint8, uint32
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.transaction_sorting import SortKey
from flax.wallet.util.transaction_type import TransactionType
def filter_ok_mempool_status(sent_to: List[Tuple[str, uint8, Optional[str]]]) -> List[Tuple[str, uint8, Optional[str]]]:
    """Remove SUCCESS and PENDING status records from a TransactionRecord sent_to field.

    Only (peer, status, error) entries whose status is FAILED are kept.
    """
    failed = MempoolInclusionStatus.FAILED.value
    return [(peer, status, err) for peer, status, err in sent_to if status == failed]
class WalletTransactionStore:
    """
    WalletTransactionStore stores transaction history for the wallet.
    """

    db_wrapper: DBWrapper2
    # In-memory resend throttle, not persisted: tx_id -> (epoch seconds of last submit, submit count)
    tx_submitted: Dict[bytes32, Tuple[int, int]]  # tx_id: [time submitted: count]
    last_wallet_tx_resend_time: int  # Epoch time in seconds

    @classmethod
    async def create(cls, db_wrapper: DBWrapper2) -> WalletTransactionStore:
        """Create the store and ensure the transaction_record table and its indexes exist."""
        self = cls()
        self.db_wrapper = db_wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                (
                    "CREATE TABLE IF NOT EXISTS transaction_record("
                    " transaction_record blob,"
                    " bundle_id text PRIMARY KEY,"  # NOTE: bundle_id is being stored as bytes, not hex
                    " confirmed_at_height bigint,"
                    " created_at_time bigint,"
                    " to_puzzle_hash text,"
                    " amount blob,"
                    " fee_amount blob,"
                    " confirmed int,"
                    " sent int,"
                    " wallet_id bigint,"
                    " trade_id text,"
                    " type int)"
                )
            )

            # Useful for reorg lookups
            await conn.execute(
                "CREATE INDEX IF NOT EXISTS tx_confirmed_index on transaction_record(confirmed_at_height)"
            )
            await conn.execute("CREATE INDEX IF NOT EXISTS tx_created_index on transaction_record(created_at_time)")
            await conn.execute("CREATE INDEX IF NOT EXISTS tx_confirmed on transaction_record(confirmed)")
            await conn.execute("CREATE INDEX IF NOT EXISTS tx_sent on transaction_record(sent)")
            await conn.execute("CREATE INDEX IF NOT EXISTS tx_created_time on transaction_record(created_at_time)")
            await conn.execute("CREATE INDEX IF NOT EXISTS tx_type on transaction_record(type)")
            await conn.execute("CREATE INDEX IF NOT EXISTS tx_to_puzzle_hash on transaction_record(to_puzzle_hash)")
            await conn.execute(
                "CREATE INDEX IF NOT EXISTS transaction_record_wallet_id on transaction_record(wallet_id)"
            )

        self.tx_submitted = {}
        self.last_wallet_tx_resend_time = int(time.time())
        return self

    async def add_transaction_record(self, record: TransactionRecord) -> None:
        """
        Store TransactionRecord in DB and Cache.
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute_insert(
                "INSERT OR REPLACE INTO transaction_record VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                (
                    bytes(record),  # full serialized record; remaining columns exist only for querying
                    record.name,
                    record.confirmed_at_height,
                    record.created_at_time,
                    record.to_puzzle_hash.hex(),
                    bytes(record.amount),
                    bytes(record.fee_amount),
                    int(record.confirmed),
                    record.sent,
                    record.wallet_id,
                    record.trade_id,
                    record.type,
                ),
            )

    async def delete_transaction_record(self, tx_id: bytes32) -> None:
        """Delete the transaction with the given bundle id, if present."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (await conn.execute("DELETE FROM transaction_record WHERE bundle_id=?", (tx_id,))).close()

    async def set_confirmed(self, tx_id: bytes32, height: uint32) -> None:
        """
        Updates transaction to be confirmed.
        """
        current: Optional[TransactionRecord] = await self.get_transaction_record(tx_id)
        if current is None:
            return None
        if current.confirmed_at_height == height:
            # Already recorded at this height; avoid a redundant write.
            return
        tx: TransactionRecord = dataclasses.replace(current, confirmed_at_height=height, confirmed=True)
        await self.add_transaction_record(tx)

    async def increment_sent(
        self,
        tx_id: bytes32,
        name: str,
        send_status: MempoolInclusionStatus,
        err: Optional[Err],
    ) -> bool:
        """
        Updates transaction sent count (Full Node has received spend_bundle and sent ack).

        Returns True if the record existed and was updated, False otherwise.
        """
        current: Optional[TransactionRecord] = await self.get_transaction_record(tx_id)
        if current is None:
            return False

        sent_to = current.sent_to.copy()

        current_peers = set()
        err_str = err.name if err is not None else None
        append_data = (name, uint8(send_status.value), err_str)

        # Collect the peers that have already responded about this transaction.
        for peer_id, status, error in sent_to:
            current_peers.add(peer_id)

        # Only bump the sent counter the first time a given peer responds.
        if name in current_peers:
            sent_count = uint32(current.sent)
        else:
            sent_count = uint32(current.sent + 1)

        sent_to.append(append_data)

        tx: TransactionRecord = dataclasses.replace(current, sent=sent_count, sent_to=sent_to)
        await self.add_transaction_record(tx)
        return True

    async def tx_reorged(self, record: TransactionRecord) -> None:
        """
        Updates transaction sent count to 0 and resets confirmation data
        """
        tx: TransactionRecord = dataclasses.replace(
            record, confirmed_at_height=uint32(0), confirmed=False, sent=uint32(0), sent_to=[]
        )
        await self.add_transaction_record(tx)

    async def get_transaction_record(self, tx_id: bytes32) -> Optional[TransactionRecord]:
        """
        Checks DB and cache for TransactionRecord with id: id and returns it.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            # NOTE: bundle_id is being stored as bytes, not hex
            rows = list(
                await conn.execute_fetchall(
                    "SELECT transaction_record from transaction_record WHERE bundle_id=?", (tx_id,)
                )
            )
        if len(rows) > 0:
            return TransactionRecord.from_bytes(rows[0][0])
        return None

    # TODO: This should probably be split into separate function, one that
    # queries the state and one that updates it. Also, include_accepted_txs=True
    # might be a separate function too.
    # also, the current time should be passed in as a parameter
    async def get_not_sent(self, *, include_accepted_txs: bool = False) -> List[TransactionRecord]:
        """
        Returns the list of transactions that have not been received by full node yet.

        Applies an in-memory throttle (tx_submitted): a transaction is returned at
        most 5 times per 10-minute window unless include_accepted_txs resets it.
        """
        current_time = int(time.time())
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT transaction_record from transaction_record WHERE confirmed=0",
            )
        records = []

        for row in rows:
            record = TransactionRecord.from_bytes(row[0])
            if include_accepted_txs:
                # Reset the "sent" state for peers that have replied about this transaction. Retain errors.
                record = dataclasses.replace(record, sent=1, sent_to=filter_ok_mempool_status(record.sent_to))
                await self.add_transaction_record(record)
                self.tx_submitted[record.name] = current_time, 1
                records.append(record)
            elif record.name in self.tx_submitted:
                time_submitted, count = self.tx_submitted[record.name]
                if time_submitted < current_time - (60 * 10):
                    # Throttle window elapsed: resend and restart the window/count.
                    records.append(record)
                    self.tx_submitted[record.name] = current_time, 1
                else:
                    if count < 5:
                        records.append(record)
                        self.tx_submitted[record.name] = time_submitted, (count + 1)
            else:
                # First time we see this transaction since startup.
                records.append(record)
                self.tx_submitted[record.name] = current_time, 1
        return records

    async def get_farming_rewards(self) -> List[TransactionRecord]:
        """
        Returns the list of all farming rewards.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            fee_int = TransactionType.FEE_REWARD.value
            pool_int = TransactionType.COINBASE_REWARD.value
            rows = await conn.execute_fetchall(
                "SELECT transaction_record from transaction_record WHERE confirmed=1 and (type=? or type=?)",
                (fee_int, pool_int),
            )
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_all_unconfirmed(self) -> List[TransactionRecord]:
        """
        Returns the list of all transaction that have not yet been confirmed.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall("SELECT transaction_record from transaction_record WHERE confirmed=0")
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_unconfirmed_for_wallet(self, wallet_id: int) -> List[TransactionRecord]:
        """
        Returns the list of transaction that have not yet been confirmed.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT transaction_record from transaction_record WHERE confirmed=0 AND wallet_id=?", (wallet_id,)
            )
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_transactions_between(
        self,
        wallet_id: int,
        start: int,
        end: int,
        sort_key: Optional[str] = None,
        reverse: bool = False,
        to_puzzle_hash: Optional[bytes32] = None,
    ) -> List[TransactionRecord]:
        """Return a list of transaction between start and end index. List is in reverse chronological order.
        start = 0 is most recent transaction
        """
        limit = end - start
        if to_puzzle_hash is None:
            puzz_hash_where = ""
        else:
            # Interpolated directly into SQL; to_puzzle_hash is a bytes32, so .hex()
            # contains only hex digits and cannot break out of the quotes.
            puzz_hash_where = f' AND to_puzzle_hash="{to_puzzle_hash.hex()}"'
        if sort_key is None:
            sort_key = "CONFIRMED_AT_HEIGHT"
        if sort_key not in SortKey.__members__:
            raise ValueError(f"There is no known sort {sort_key}")
        if reverse:
            query_str = SortKey[sort_key].descending()
        else:
            query_str = SortKey[sort_key].ascending()
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                f"SELECT transaction_record FROM transaction_record WHERE wallet_id=?{puzz_hash_where}"
                f" {query_str}, rowid"  # rowid as a tie-breaker keeps the ordering stable
                f" LIMIT {start}, {limit}",
                (wallet_id,),
            )
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_transaction_count_for_wallet(self, wallet_id: int) -> int:
        """Return the number of stored transactions for the given wallet."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = list(
                await conn.execute_fetchall("SELECT COUNT(*) FROM transaction_record where wallet_id=?", (wallet_id,))
            )
        return 0 if len(rows) == 0 else rows[0][0]

    async def get_all_transactions_for_wallet(self, wallet_id: int, type: Optional[int] = None) -> List[TransactionRecord]:
        """
        Returns all stored transactions for a wallet, optionally filtered by type.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            if type is None:
                rows = await conn.execute_fetchall(
                    "SELECT transaction_record FROM transaction_record WHERE wallet_id=?", (wallet_id,)
                )
            else:
                rows = await conn.execute_fetchall(
                    "SELECT transaction_record FROM transaction_record WHERE wallet_id=? AND type=?",
                    (
                        wallet_id,
                        type,
                    ),
                )
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_all_transactions(self) -> List[TransactionRecord]:
        """
        Returns all stored transactions.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall("SELECT transaction_record from transaction_record")
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_transaction_above(self, height: int) -> List[TransactionRecord]:
        """Return transactions confirmed strictly above *height*."""
        # Can be -1 (get all tx)
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT transaction_record from transaction_record WHERE confirmed_at_height>?", (height,)
            )
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def get_transactions_by_trade_id(self, trade_id: bytes32) -> List[TransactionRecord]:
        """Return all transactions belonging to the given trade."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT transaction_record from transaction_record WHERE trade_id=?", (trade_id,)
            )
        return [TransactionRecord.from_bytes(row[0]) for row in rows]

    async def rollback_to_block(self, height: int) -> None:
        """Delete all transactions confirmed above *height* (reorg handling)."""
        # Delete from storage
        self.tx_submitted = {}
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (await conn.execute("DELETE FROM transaction_record WHERE confirmed_at_height>?", (height,))).close()

    async def delete_unconfirmed_transactions(self, wallet_id: int) -> None:
        """Delete all unconfirmed transactions for the given wallet."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (
                await conn.execute("DELETE FROM transaction_record WHERE confirmed=0 AND wallet_id=?", (wallet_id,))
            ).close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_nft_store.py | flax/wallet/wallet_nft_store.py | from __future__ import annotations
import json
import logging
from sqlite3 import Row
from typing import List, Optional, Type, TypeVar, Union
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.db_wrapper import DBWrapper2, execute_fetchone
from flax.util.ints import uint32
from flax.wallet.lineage_proof import LineageProof
from flax.wallet.nft_wallet.nft_info import DEFAULT_STATUS, IN_TRANSACTION_STATUS, NFTCoinInfo
log = logging.getLogger(__name__)

_T_WalletNftStore = TypeVar("_T_WalletNftStore", bound="WalletNftStore")

# How many blocks a removed NFT row is retained before being pruned; rows older
# than this relative to the latest height are assumed safe from reorgs (see save_nft).
REMOVE_BUFF_BLOCKS = 1000
# Column list and order consumed by _to_nft_coin_info / get_nft_list — keep in sync.
NFT_COIN_INFO_COLUMNS = "nft_id, coin, lineage_proof, mint_height, status, full_puzzle, latest_height, minter_did"
def _to_nft_coin_info(row: Row) -> NFTCoinInfo:
    """Build an NFTCoinInfo from a row selected with NFT_COIN_INFO_COLUMNS."""
    # Row layout: nft_id, coin, lineage_proof, mint_height, status, full_puzzle, latest_height, minter_did
    nft_id, coin_json, proof_json, mint_height, status, full_puzzle, latest_height, minter_did = row
    return NFTCoinInfo(
        bytes32.from_hexstr(nft_id),
        Coin.from_json_dict(json.loads(coin_json)),
        LineageProof.from_json_dict(json.loads(proof_json)) if proof_json is not None else None,
        Program.from_bytes(full_puzzle),
        uint32(mint_height),
        bytes32.from_hexstr(minter_did) if minter_did is not None else None,
        uint32(latest_height) if latest_height is not None else uint32(0),
        status == IN_TRANSACTION_STATUS,
    )
class WalletNftStore:
    """
    WalletNftStore keeps track of all user created NFTs and necessary smart-contract data
    """

    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls: Type[_T_WalletNftStore], db_wrapper: DBWrapper2) -> _T_WalletNftStore:
        """Create the store and ensure the users_nfts table, migration columns, and indexes exist."""
        self = cls()
        self.db_wrapper = db_wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                (
                    "CREATE TABLE IF NOT EXISTS users_nfts("
                    " nft_id text PRIMARY KEY,"
                    " nft_coin_id text,"
                    " wallet_id int,"
                    " did_id text,"
                    " coin text,"
                    " lineage_proof text,"
                    " mint_height bigint,"
                    " status text,"
                    " full_puzzle blob)"
                )
            )
            await conn.execute("CREATE INDEX IF NOT EXISTS nft_coin_id on users_nfts(nft_coin_id)")
            await conn.execute("CREATE INDEX IF NOT EXISTS nft_wallet_id on users_nfts(wallet_id)")
            await conn.execute("CREATE INDEX IF NOT EXISTS nft_did_id on users_nfts(did_id)")
            try:
                # Add your new column on the top, otherwise it will not be created.
                await conn.execute("ALTER TABLE users_nfts ADD COLUMN minter_did text")
                # These are patched columns for resolving reorg issue
                await conn.execute("ALTER TABLE users_nfts ADD COLUMN removed_height bigint")
                await conn.execute("ALTER TABLE users_nfts ADD COLUMN latest_height bigint")
                await conn.execute("CREATE INDEX IF NOT EXISTS removed_nft_height on users_nfts(removed_height)")
                await conn.execute("CREATE INDEX IF NOT EXISTS latest_nft_height on users_nfts(latest_height)")
            except Exception:
                # Poor-man's migration: once a column already exists the first
                # ALTER TABLE raises, skipping the rest of the block on later startups.
                pass
        return self

    async def delete_nft_by_nft_id(self, nft_id: bytes32, height: uint32) -> bool:
        """Tries to mark a given NFT as deleted at specific height

        This is due to how re-org works
        Returns `True` if NFT was found and marked deleted or `False` if not."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            # Remove NFT in the users_nfts table
            cursor = await conn.execute(
                "UPDATE users_nfts SET removed_height=? WHERE nft_id=?", (int(height), nft_id.hex())
            )
            return cursor.rowcount > 0

    async def delete_nft_by_coin_id(self, coin_id: bytes32, height: uint32) -> bool:
        """Tries to mark a given NFT as deleted at specific height

        This is due to how re-org works
        Returns `True` if NFT was found and marked deleted or `False` if not."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            # Remove NFT in the users_nfts table
            cursor = await conn.execute(
                "UPDATE users_nfts SET removed_height=? WHERE nft_coin_id=?", (int(height), coin_id.hex())
            )
            if cursor.rowcount > 0:
                log.info("Deleted NFT with coin id: %s", coin_id.hex())
                return True
            log.warning("Couldn't find NFT with coin id to delete: %s", coin_id)
            return False

    async def update_pending_transaction(self, nft_coin_id: bytes32, pending_transaction: bool) -> bool:
        """Set the NFT's status flag; returns True if a row with the given coin id was updated."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            c = await conn.execute(
                "UPDATE users_nfts SET status=? WHERE nft_coin_id = ?",
                (IN_TRANSACTION_STATUS if pending_transaction else DEFAULT_STATUS, nft_coin_id.hex()),
            )
            return c.rowcount > 0

    async def save_nft(self, wallet_id: uint32, did_id: Optional[bytes32], nft_coin_info: NFTCoinInfo) -> None:
        """Insert or replace an NFT record, then prune removed rows old enough to be reorg-safe."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            columns = (
                "nft_id, nft_coin_id, wallet_id, did_id, coin, lineage_proof, mint_height, status, full_puzzle, "
                "minter_did, removed_height, latest_height"
            )
            await conn.execute(
                f"INSERT or REPLACE INTO users_nfts ({columns}) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                (
                    nft_coin_info.nft_id.hex(),
                    nft_coin_info.coin.name().hex(),
                    int(wallet_id),
                    did_id.hex() if did_id else None,
                    json.dumps(nft_coin_info.coin.to_json_dict()),
                    json.dumps(nft_coin_info.lineage_proof.to_json_dict())
                    if nft_coin_info.lineage_proof is not None
                    else None,
                    int(nft_coin_info.mint_height),
                    IN_TRANSACTION_STATUS if nft_coin_info.pending_transaction else DEFAULT_STATUS,
                    bytes(nft_coin_info.full_puzzle),
                    None if nft_coin_info.minter_did is None else nft_coin_info.minter_did.hex(),
                    None,  # removed_height: saving (re)marks the NFT as live
                    int(nft_coin_info.latest_height),
                ),
            )
            # Rotate the old removed NFTs, they are not possible to be reorged
            await conn.execute(
                "DELETE FROM users_nfts WHERE removed_height is not NULL and removed_height<?",
                (int(nft_coin_info.latest_height) - REMOVE_BUFF_BLOCKS,),
            )

    async def count(self, wallet_id: Optional[uint32] = None, did_id: Optional[bytes32] = None) -> int:
        """Count live (not removed) NFTs, optionally filtered by wallet and/or DID.

        Returns -1 if the count query unexpectedly produced no row.
        """
        sql = "SELECT COUNT(nft_id) FROM users_nfts WHERE removed_height is NULL"
        params: List[Union[uint32, bytes32]] = []
        if wallet_id is not None:
            sql += " AND wallet_id=?"
            params.append(wallet_id)
        if did_id is not None:
            sql += " AND did_id=?"
            params.append(did_id)
        async with self.db_wrapper.reader_no_transaction() as conn:
            count_row = await execute_fetchone(conn, sql, params)
            if count_row:
                return int(count_row[0])
        return -1

    async def get_nft_list(
        self, wallet_id: Optional[uint32] = None, did_id: Optional[bytes32] = None
    ) -> List[NFTCoinInfo]:
        """Return all live (not removed) NFTs, optionally filtered by wallet id and/or DID.

        Filters are interpolated directly into the SQL; wallet_id is an int and
        did_id.hex() is hex digits only, so quoting is not an issue here.
        """
        sql: str = f"SELECT {NFT_COIN_INFO_COLUMNS}" " from users_nfts WHERE"
        if wallet_id is not None and did_id is None:
            sql += f" wallet_id={wallet_id}"
        if wallet_id is None and did_id is not None:
            sql += f" did_id='{did_id.hex()}'"
        if wallet_id is not None and did_id is not None:
            sql += f" did_id='{did_id.hex()}' and wallet_id={wallet_id}"
        if wallet_id is not None or did_id is not None:
            sql += " and"
        sql += " removed_height is NULL"
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(sql)

            # Same row layout as NFT_COIN_INFO_COLUMNS (mirrors _to_nft_coin_info).
            return [
                NFTCoinInfo(
                    bytes32.from_hexstr(row[0]),
                    Coin.from_json_dict(json.loads(row[1])),
                    None if row[2] is None else LineageProof.from_json_dict(json.loads(row[2])),
                    Program.from_bytes(row[5]),
                    uint32(row[3]),
                    None if row[7] is None else bytes32.from_hexstr(row[7]),
                    uint32(row[6]) if row[6] is not None else uint32(0),
                    row[4] == IN_TRANSACTION_STATUS,
                )
                for row in rows
            ]

    async def exists(self, coin_id: bytes32) -> bool:
        """Return True if a live NFT row exists for the given coin id."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await execute_fetchone(
                conn,
                "SELECT EXISTS(SELECT nft_id"
                " from users_nfts WHERE removed_height is NULL and nft_coin_id=? LIMIT 1)",
                (coin_id.hex(),),
            )
            return True if rows and rows[0] == 1 else False

    async def get_nft_by_coin_id(self, nft_coin_id: bytes32) -> Optional[NFTCoinInfo]:
        """Return the live NFT for the given coin id, or None if not found."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                f"SELECT {NFT_COIN_INFO_COLUMNS} from users_nfts WHERE removed_height is NULL and nft_coin_id = ?",
                (nft_coin_id.hex(),),
            )
            rows = list(rows)
            if len(rows) == 1:
                return _to_nft_coin_info(rows[0])
            elif len(rows) == 2:
                # NOTE(review): only exactly two rows raise here; three or more fall
                # through and return None silently — confirm that is intended.
                raise ValueError("Can only return one NFT, but found > 1 from given coin ids")
            return None

    async def get_nft_by_id(self, nft_id: bytes32, wallet_id: Optional[uint32] = None) -> Optional[NFTCoinInfo]:
        """Return the live NFT with the given launcher id, optionally scoped to a wallet."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            sql = f"SELECT {NFT_COIN_INFO_COLUMNS} from users_nfts WHERE removed_height is NULL and nft_id=?"
            params: List[Union[uint32, str]] = [nft_id.hex()]
            if wallet_id:
                sql += " and wallet_id=?"
                params.append(wallet_id)
            row = await execute_fetchone(
                conn,
                sql,
                params,
            )
            if row is None:
                return None
            return _to_nft_coin_info(row)

    async def rollback_to_block(self, height: int) -> bool:
        """
        Rolls back the blockchain to block_index. All coins confirmed after this point are removed.
        All coins spent after this point are set to unspent. Can be -1 (rollback all)
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            # Remove reorged NFTs
            await conn.execute("DELETE FROM users_nfts WHERE latest_height>?", (height,))
            # Retrieve removed NFTs
            result = await conn.execute(
                "UPDATE users_nfts SET removed_height = null WHERE removed_height>?",
                (height,),
            )
            if result.rowcount > 0:
                return True
            return False
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_coin_store.py | flax/wallet/wallet_coin_store.py | from typing import List, Optional, Set, Dict
import sqlite3
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.db_wrapper import DBWrapper2, execute_fetchone
from flax.util.ints import uint32, uint64
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_coin_record import WalletCoinRecord
class WalletCoinStore:
    """
    This object handles CoinRecords in DB used by wallet.
    """

    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls, wrapper: DBWrapper2) -> "WalletCoinStore":
        """Create the store and ensure the coin_record table and its indexes exist."""
        self = cls()
        self.db_wrapper = wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                (
                    "CREATE TABLE IF NOT EXISTS coin_record("
                    "coin_name text PRIMARY KEY,"
                    " confirmed_height bigint,"
                    " spent_height bigint,"
                    " spent int,"
                    " coinbase int,"
                    " puzzle_hash text,"
                    " coin_parent text,"
                    " amount blob,"
                    " wallet_type int,"
                    " wallet_id int)"
                )
            )

            # Useful for reorg lookups
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_confirmed_height on coin_record(confirmed_height)")
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_spent_height on coin_record(spent_height)")
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_spent on coin_record(spent)")
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_puzzlehash on coin_record(puzzle_hash)")
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_record_wallet_type on coin_record(wallet_type)")
            await conn.execute("CREATE INDEX IF NOT EXISTS wallet_id on coin_record(wallet_id)")
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_amount on coin_record(amount)")
        return self

    async def count_small_unspent(self, cutoff: int) -> int:
        """Count unspent coins with amount strictly below *cutoff*.

        NOTE: amounts are stored via bytes(uint64(...)) (fixed-width encoding, see
        add_coin_record), so the blob comparison is assumed to match numeric order.
        """
        amount_bytes = bytes(uint64(cutoff))
        async with self.db_wrapper.reader_no_transaction() as conn:
            row = await execute_fetchone(
                conn, "SELECT COUNT(*) FROM coin_record WHERE amount < ? AND spent=0", (amount_bytes,)
            )
            return int(0 if row is None else row[0])

    async def get_multiple_coin_records(self, coin_names: List[bytes32]) -> List[WalletCoinRecord]:
        """Return WalletCoinRecord(s) that have a coin name in the specified list"""
        if len(coin_names) == 0:
            return []
        as_hexes = [cn.hex() for cn in coin_names]
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                f'SELECT * from coin_record WHERE coin_name in ({"?," * (len(as_hexes) - 1)}?)', tuple(as_hexes)
            )
        return [self.coin_record_from_row(row) for row in rows]

    # Store CoinRecord in DB and ram cache
    async def add_coin_record(self, record: WalletCoinRecord, name: Optional[bytes32] = None) -> None:
        """Insert or replace a coin record; *name* may be passed to avoid re-hashing the coin."""
        if name is None:
            name = record.name()
        # Invariant: a coin is spent iff it has a non-zero spent height.
        assert record.spent == (record.spent_block_height != 0)
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute_insert(
                "INSERT OR REPLACE INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                (
                    name.hex(),
                    record.confirmed_block_height,
                    record.spent_block_height,
                    int(record.spent),
                    int(record.coinbase),
                    str(record.coin.puzzle_hash.hex()),
                    str(record.coin.parent_coin_info.hex()),
                    bytes(uint64(record.coin.amount)),
                    record.wallet_type,
                    record.wallet_id,
                ),
            )

    # Sometimes we realize that a coin is actually not interesting to us so we need to delete it
    async def delete_coin_record(self, coin_name: bytes32) -> None:
        """Delete the record for the given coin id, if present."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (await conn.execute("DELETE FROM coin_record WHERE coin_name=?", (coin_name.hex(),))).close()

    # Update coin_record to be spent in DB
    async def set_spent(self, coin_name: bytes32, height: uint32) -> None:
        """Mark the given coin as spent at *height*."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute_insert(
                "UPDATE coin_record SET spent_height=?,spent=? WHERE coin_name=?",
                (
                    height,
                    1,
                    coin_name.hex(),
                ),
            )

    def coin_record_from_row(self, row: sqlite3.Row) -> WalletCoinRecord:
        """Convert a SELECT * row into a WalletCoinRecord.

        Column order: 0 coin_name, 1 confirmed_height, 2 spent_height, 3 spent,
        4 coinbase, 5 puzzle_hash, 6 coin_parent, 7 amount, 8 wallet_type, 9 wallet_id.
        """
        coin = Coin(bytes32.fromhex(row[6]), bytes32.fromhex(row[5]), uint64.from_bytes(row[7]))
        return WalletCoinRecord(
            coin, uint32(row[1]), uint32(row[2]), bool(row[3]), bool(row[4]), WalletType(row[8]), row[9]
        )

    async def get_coin_record(self, coin_name: bytes32) -> Optional[WalletCoinRecord]:
        """Returns CoinRecord with specified coin id."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = list(await conn.execute_fetchall("SELECT * from coin_record WHERE coin_name=?", (coin_name.hex(),)))

        if len(rows) == 0:
            return None
        return self.coin_record_from_row(rows[0])

    async def get_coin_records(self, coin_names: List[bytes32]) -> List[Optional[WalletCoinRecord]]:
        """Return one entry per requested coin id, in order; None for coins not found."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = list(
                await conn.execute_fetchall(
                    f"SELECT * from coin_record WHERE coin_name in ({','.join('?'*len(coin_names))})",
                    [c.hex() for c in coin_names],
                )
            )

        # Re-align DB results (unordered) with the order of the request.
        ret: Dict[bytes32, WalletCoinRecord] = {}
        for row in rows:
            record = self.coin_record_from_row(row)
            coin_name = bytes32.fromhex(row[0])
            ret[coin_name] = record

        return [ret.get(name) for name in coin_names]

    async def get_first_coin_height(self) -> Optional[uint32]:
        """Returns height of first confirmed coin"""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = list(await conn.execute_fetchall("SELECT MIN(confirmed_height) FROM coin_record"))

        if len(rows) != 0 and rows[0][0] is not None:
            return uint32(rows[0][0])

        return None

    async def get_unspent_coins_for_wallet(self, wallet_id: int) -> Set[WalletCoinRecord]:
        """Returns set of CoinRecords that have not been spent yet for a wallet."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT * FROM coin_record WHERE wallet_id=? AND spent_height=0", (wallet_id,)
            )

        return set(self.coin_record_from_row(row) for row in rows)

    async def get_all_unspent_coins(self) -> Set[WalletCoinRecord]:
        """Returns the set of all CoinRecords (across all wallets) that have not been spent yet."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall("SELECT * FROM coin_record WHERE spent_height=0")

        return set(self.coin_record_from_row(row) for row in rows)

    async def get_coin_names_to_check(self, check_height: int) -> Set[bytes32]:
        """Returns the coin ids that are unspent or changed above *check_height* (reorg re-check set)."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT coin_name from coin_record where spent_height=0 or spent_height>? or confirmed_height>?",
                (
                    check_height,
                    check_height,
                ),
            )
        return set(bytes32.fromhex(row[0]) for row in rows)

    # Checks DB and DiffStores for CoinRecords with puzzle_hash and returns them
    async def get_coin_records_by_puzzle_hash(self, puzzle_hash: bytes32) -> List[WalletCoinRecord]:
        """Returns a list of all coin records with the given puzzle hash"""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall("SELECT * from coin_record WHERE puzzle_hash=?", (puzzle_hash.hex(),))

        return [self.coin_record_from_row(row) for row in rows]

    # Checks DB and DiffStores for CoinRecords with parent_coin_info and returns them
    async def get_coin_records_by_parent_id(self, parent_coin_info: bytes32) -> List[WalletCoinRecord]:
        """Returns a list of all coin records with the given parent id"""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                "SELECT * from coin_record WHERE coin_parent=?", (parent_coin_info.hex(),)
            )

        return [self.coin_record_from_row(row) for row in rows]

    async def rollback_to_block(self, height: int) -> None:
        """
        Rolls back the blockchain to block_index. All coins confirmed after this point are removed.
        All coins spent after this point are set to unspent. Can be -1 (rollback all)
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (await conn.execute("DELETE FROM coin_record WHERE confirmed_height>?", (height,))).close()
            await (
                await conn.execute(
                    "UPDATE coin_record SET spent_height = 0, spent = 0 WHERE spent_height>?",
                    (height,),
                )
            ).close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_info.py | flax/wallet/wallet_info.py | from __future__ import annotations
from dataclasses import dataclass
from typing import List
from flax.util.ints import uint8, uint32
from flax.util.streamable import Streamable, streamable
@streamable
@dataclass(frozen=True)
class WalletInfo(Streamable):
    """
    Represents wallet metadata as it is stored in the DB.

    id:   The main (standard) wallet is stored at index 1; every wallet created
          afterwards receives an auto-incremented id.
    name: A user-provided or default-generated name (may be modified).
    type: Specified during wallet creation and should never be changed.
    data: Intended for any wallet-specific information the wallet requires
          (e.g. the RL wallet stores origin_id, admin/user pubkey, rate limit).
          This should be a JSON-encoded string.
    """

    id: uint32
    name: str
    type: uint8  # numeric value of WalletType(type)
    data: str
@streamable
@dataclass(frozen=True)
class WalletInfoBackup(Streamable):
    """
    Wraps a list of WalletInfo objects so the whole set can be serialized to
    bytes via the Streamable machinery.
    """

    wallet_list: List[WalletInfo]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/flaxlisp.py | flax/wallet/flaxlisp.py | from __future__ import annotations
def sexp(*argv):
    """Render *argv* as a parenthesized, space-separated S-expression string."""
    # Fixed: the separator was written as the pointless f-string f" ".
    return f'({" ".join(str(arg) for arg in argv)})'


def cons(a, b):
    """Build a cons cell expression (c a b)."""
    return sexp("c", a, b)


def first(obj):
    """Expression taking the first element of *obj*."""
    return sexp("f", obj)


def rest(obj):
    """Expression taking the rest (tail) of *obj*."""
    return sexp("r", obj)


def nth(obj, *path):
    """
    Build an expression selecting the element of *obj* addressed by *path*,
    where each path entry is a zero-based position in a cons list.

    Raises ValueError for negative path components.
    """
    if not path:
        return obj
    if path[0] < 0:
        raise ValueError
    if path[0] == 0:
        return nth(first(obj), *path[1:])
    else:
        return nth(rest(obj), *(path[0] - 1,) + path[1:])


def args(*path, p=1):
    """
    Return the CLVM argument-tree position (as a decimal string) addressed by
    *path*; with no path this is "1", the whole argument list.

    Raises ValueError for negative path components.
    """
    if len(path) == 0:
        return str(p)
    if path[0] < 0:
        raise ValueError
    return args(*path[1:], p=(2 * p << path[0]) | (2 ** path[0] - 1))


def eval(code, env=args()):
    """Build an (a code env) apply expression; env defaults to the whole args."""
    return sexp("a", code, env)


def apply(name, argv):
    """Build an operator application (name arg1 arg2 ...)."""
    return sexp(name, *argv)


def quote(obj):
    """Build a quoted literal (q . obj)."""
    return sexp("q .", obj)


# The empty list / nil literal: "()"
nil = sexp()


def make_if(predicate, true_expression, false_expression):
    """Build a lazy if: both branches are quoted, then the chosen one is applied."""
    return eval(apply("i", [predicate, quote(true_expression), quote(false_expression)]))


def make_list(*argv, terminator=nil):
    """Build a proper cons list of *argv*, ending in *terminator* (default nil)."""
    if len(argv) == 0:
        return terminator
    else:
        return cons(argv[0], make_list(*argv[1:], terminator=terminator))


def fail(*argv):
    """Build an (x ...) raise/abort expression."""
    return apply("x", argv)


def sha256(*argv):
    """Build a (sha256 ...) hash expression."""
    return apply("sha256", argv)


SHA256TREE_PROG = """
(a (q . (a 2 (c 2 (c 3 0))))
   (c (q . (a (i (l 5)
                 (q . (sha256 (q . 2)
                              (a 2 (c 2 (c 9 0)))
                              (a 2 (c 2 (c 13 0)))))
                 (q . (sha256 (q . 1) 5))) 1)) %s))
"""


def sha256tree(*argv):
    """Build an expression computing the sha256 tree hash of argv[0]."""
    return SHA256TREE_PROG % argv[0]


def equal(*argv):
    """Build an (= ...) comparison expression."""
    return apply("=", argv)


def multiply(*argv):
    """Build a (* ...) product expression."""
    return apply("*", argv)


def add(*argv):
    """Build a (+ ...) sum expression."""
    return apply("+", argv)


def subtract(*argv):
    """Build a (- ...) difference expression."""
    return apply("-", argv)


def is_zero(obj):
    """Build an expression testing *obj* against the quoted literal 0."""
    return equal(obj, quote("0"))


def iff(*argv):
    """Build an eager (i predicate then else) expression."""
    return apply("i", argv)


def hexstr(str):
    """Quote *str* as a hex literal (q . 0x<str>)."""
    return quote(f"0x{str}")


def greater(*argv):
    """Build a (> ...) comparison expression."""
    return apply(">", argv)


def string(str):
    """Wrap *str* in double quotes for use as a string literal."""
    return f'"{str}"'
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/derive_keys.py | flax/wallet/derive_keys.py | from typing import List, Optional, Tuple, Set
from blspy import AugSchemeMPL, PrivateKey, G1Element
from flax.consensus.coinbase import create_puzzlehash_for_pk
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint32
# EIP 2334 bls key derivation
# https://eips.ethereum.org/EIPS/eip-2334
# 12381 = bls spec number
# 8444 = Chia blockchain number and port number for compatibility
# 0, 1, 2, 3, 4, 5, 6 farmer, pool, wallet, local, backup key, singleton, pooling authentication key numbers
# Allows up to 100 pool wallets (plot NFTs)
MAX_POOL_WALLETS = 100
def _derive_path(sk: PrivateKey, path: List[int]) -> PrivateKey:
    """Walk ``path`` from ``sk`` using hardened child derivation."""
    derived = sk
    for child_index in path:
        derived = AugSchemeMPL.derive_child_sk(derived, child_index)
    return derived
def _derive_path_unhardened(sk: PrivateKey, path: List[int]) -> PrivateKey:
    """Walk ``path`` from ``sk`` using unhardened child derivation."""
    derived = sk
    for child_index in path:
        derived = AugSchemeMPL.derive_child_sk_unhardened(derived, child_index)
    return derived
# EIP-2334 path prefix components shared by every derived key below.
_BLS_SPEC = 12381  # BLS spec number
_CHIA_BLOCKCHAIN = 8444  # Chia blockchain number (kept for compatibility)


def master_sk_to_farmer_sk(master: PrivateKey) -> PrivateKey:
    """Derive the farmer key (branch 0, index 0)."""
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 0, 0])


def master_sk_to_pool_sk(master: PrivateKey) -> PrivateKey:
    """Derive the pool key (branch 1, index 0)."""
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 1, 0])


def master_sk_to_wallet_sk_intermediate(master: PrivateKey) -> PrivateKey:
    """Derive the hardened wallet-branch intermediate key (branch 2)."""
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 2])


def master_sk_to_wallet_sk(master: PrivateKey, index: uint32) -> PrivateKey:
    """Derive the hardened wallet key at ``index`` under the wallet branch."""
    intermediate = master_sk_to_wallet_sk_intermediate(master)
    return _derive_path(intermediate, [index])


def master_sk_to_wallet_sk_unhardened_intermediate(master: PrivateKey) -> PrivateKey:
    """Derive the unhardened wallet-branch intermediate key (branch 2)."""
    return _derive_path_unhardened(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 2])


def master_sk_to_wallet_sk_unhardened(master: PrivateKey, index: uint32) -> PrivateKey:
    """Derive the unhardened wallet key at ``index`` under the wallet branch."""
    intermediate = master_sk_to_wallet_sk_unhardened_intermediate(master)
    return _derive_path_unhardened(intermediate, [index])


def master_sk_to_local_sk(master: PrivateKey) -> PrivateKey:
    """Derive the local key (branch 3, index 0)."""
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 3, 0])


def master_sk_to_backup_sk(master: PrivateKey) -> PrivateKey:
    """Derive the backup key (branch 4, index 0)."""
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 4, 0])


def master_sk_to_singleton_owner_sk(master: PrivateKey, pool_wallet_index: uint32) -> PrivateKey:
    """
    This key controls a singleton on the blockchain, allowing for dynamic pooling (changing pools)
    """
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 5, pool_wallet_index])


def master_sk_to_pooling_authentication_sk(master: PrivateKey, pool_wallet_index: uint32, index: uint32) -> PrivateKey:
    """
    This key is used for the farmer to authenticate to the pool when sending partials
    """
    assert index < 10000
    assert pool_wallet_index < 10000
    # Both sub-indexes are packed into a single child index.
    return _derive_path(master, [_BLS_SPEC, _CHIA_BLOCKCHAIN, 6, pool_wallet_index * 10000 + index])
def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tuple[PrivateKey, uint32]]:
    """
    Search ``all_sks`` for the singleton-owner secret key whose public key is
    ``owner_pk``; return (sk, pool_wallet_index) or None if nothing matches.
    """
    for pool_wallet_index in range(MAX_POOL_WALLETS):
        wallet_index = uint32(pool_wallet_index)
        for root_sk in all_sks:
            candidate = master_sk_to_singleton_owner_sk(root_sk, wallet_index)
            if candidate.get_g1() == owner_pk:
                return candidate, wallet_index
    return None
def find_authentication_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[PrivateKey]:
    """
    Return the pooling authentication key for the wallet whose singleton owner
    public key is ``owner_pk``, or None if no key in ``all_sks`` matches.
    """
    # NOTE: might need to increase this if using a large number of wallets, or have switched authentication keys
    # many times.
    for pool_wallet_index in range(MAX_POOL_WALLETS):
        wallet_index = uint32(pool_wallet_index)
        for root_sk in all_sks:
            candidate = master_sk_to_singleton_owner_sk(root_sk, wallet_index)
            if candidate.get_g1() == owner_pk:
                # NOTE: ONLY use 0 for authentication key index to ensure compatibility
                return master_sk_to_pooling_authentication_sk(root_sk, wallet_index, uint32(0))
    return None
def match_address_to_sk(
    sk: PrivateKey, addresses_to_search: List[bytes32], max_ph_to_search: int = 500
) -> Set[bytes32]:
    """
    Check which of ``addresses_to_search`` are puzzle hashes derived from ``sk``
    within the first ``max_ph_to_search`` derivation indexes (both hardened and
    unhardened paths are tried).  Returns the subset that matched.
    """
    if sk is None or not addresses_to_search:
        return set()

    remaining: Set[bytes32] = set(addresses_to_search)
    matched: Set[bytes32] = set()

    for index in range(max_ph_to_search):
        derived_phs = {
            create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(index)).get_g1()),
            create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(index)).get_g1()),
        }
        hits = remaining & derived_phs
        matched |= hits
        remaining -= hits
        if not remaining:
            # Everything accounted for; stop deriving early.
            break
    return matched
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/puzzle_drivers.py | flax/wallet/puzzle_drivers.py | from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict, List, Optional
from clvm.casts import int_from_bytes
from clvm.SExp import SExp
from clvm_tools.binutils import assemble, type_for_atom
from ir.Type import Type
from flax.types.blockchain_format.program import Program
"""
The following two classes act as wrapper classes around dictionaries of strings.
Values in the dictionary are assumed to be strings in CLVM format (0x for bytes, etc.)
When you access a value in the dictionary, it will be deserialized to a str, int, bytes, or Program appropriately.
"""
@dataclass(frozen=True)
class PuzzleInfo:
    """
    There are two 'magic' keys in a PuzzleInfo object:
    - 'type' must be an included key (for easy lookup of drivers)
    - 'also' gets its own method as it's the supported way to do recursion of PuzzleInfos
    """

    info: Dict[str, Any]

    def __post_init__(self) -> None:
        # A driver without a 'type' key cannot be dispatched on, so reject it early.
        if "type" not in self.info:
            raise ValueError("A type is required to initialize a puzzle driver")

    def __getitem__(self, item: str) -> Any:
        # Values are stored as CLVM-format strings; decode on access.
        value = self.info[item]
        return decode_info_value(PuzzleInfo, value)

    def __eq__(self, other: object) -> bool:
        # NOTE: this is a subset comparison — every key of *self* must decode to
        # the same value in *other*; extra keys in *other* are ignored, so
        # equality is not symmetric. Any lookup/decoding error counts as unequal.
        for key, value in self.info.items():
            try:
                if self[key] != other[key]:  # type: ignore
                    return False
            except Exception:
                return False
        return True

    def __contains__(self, item: str) -> bool:
        if item in self.info:
            return True
        else:
            return False

    def type(self) -> str:
        """Return the driver type name (always present, see __post_init__)."""
        return str(self.info["type"])

    def also(self) -> Optional[PuzzleInfo]:
        """Return the nested PuzzleInfo under 'also', or None when absent."""
        if "also" in self.info:
            return PuzzleInfo(self.info["also"])
        else:
            return None

    def check_type(self, types: List[str]) -> bool:
        """
        Check that the chain of 'also'-nested drivers matches ``types`` exactly,
        in order, with no drivers left over.

        NOTE: consumes (pops from) the caller's ``types`` list as it matches.
        When a type matches but there is no 'also', the remaining types are
        re-checked against this same driver (which fails unless they are empty).
        """
        if types == []:
            if self.also() is None:
                return True
            else:
                return False
        else:
            if self.type() == types[0]:
                types.pop(0)
                if self.also():
                    return self.also().check_type(types)  # type: ignore
                else:
                    return self.check_type(types)
            else:
                return False
@dataclass(frozen=True)
class Solver:
    """
    Wrapper around a dict of CLVM-format string values; values are decoded to
    str/int/bytes/Program on access (see decode_info_value).
    """

    info: Dict[str, Any]

    def __getitem__(self, item: str) -> Any:
        value = self.info[item]
        return decode_info_value(Solver, value)

    def __eq__(self, other: object) -> bool:
        # NOTE: subset comparison, like PuzzleInfo.__eq__ — every key of *self*
        # must decode equal in *other*; extra keys in *other* are ignored and
        # any error counts as unequal, so equality is not symmetric.
        for key, value in self.info.items():
            try:
                if self[key] != other[key]:  # type: ignore
                    return False
            except Exception:
                return False
        return True
def decode_info_value(cls: Any, value: Any) -> Any:
    """
    Deserialize ``value`` into python types: dicts become ``cls`` instances,
    lists decode element-wise, and strings are parsed as CLVM assembly into a
    Program, str, int, or raw bytes atom.
    """
    if isinstance(value, dict):
        return cls(value)
    if isinstance(value, list):
        return [decode_info_value(cls, element) for element in value]
    if value == "()":  # special case
        return Program.to([])
    expression: SExp = assemble(value)  # type: ignore
    if expression.atom is None:
        # Non-atom expression: keep it as a Program.
        return Program(expression)
    atom: bytes = expression.atom
    typ = type_for_atom(atom)
    if typ == Type.QUOTES and value[0:2] != "0x":
        return bytes(atom).decode("utf8")
    if typ == Type.INT:
        return int_from_bytes(atom)
    return atom
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_protocol.py | flax/wallet/wallet_protocol.py | from __future__ import annotations
from typing import TYPE_CHECKING, List, Optional, Set
from blspy import G1Element
from typing_extensions import Protocol
from flax.server.ws_connection import WSFlaxConnection
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint8, uint32, uint64, uint128
from flax.wallet.wallet_coin_record import WalletCoinRecord
if TYPE_CHECKING:
from flax.wallet.wallet_state_manager import WalletStateManager
class WalletProtocol(Protocol):
    """Structural (duck-typed) interface that every wallet implementation satisfies."""

    # TODO: it seems like this should return WalletType instead
    @classmethod
    def type(cls) -> uint8:
        """Return the numeric wallet type of this wallet class."""
        ...

    def id(self) -> uint32:
        """Return this wallet's id within the wallet state manager."""
        ...

    async def coin_added(self, coin: Coin, height: uint32, peer: WSFlaxConnection) -> None:
        """Handle a coin newly confirmed at ``height``, reported by ``peer``."""
        ...

    async def select_coins(
        self,
        amount: uint64,
        exclude: Optional[List[Coin]] = None,
        min_coin_amount: Optional[uint64] = None,
        max_coin_amount: Optional[uint64] = None,
    ) -> Set[Coin]:
        """Pick spendable coins totalling at least ``amount``, honoring the filters."""
        ...

    async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128:
        """Return the confirmed balance (optionally computed from ``record_list``)."""
        ...

    async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
        """Return the balance including pending (unconfirmed) transactions."""
        ...

    async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
        """Return the portion of the balance that is currently spendable."""
        ...

    async def get_pending_change_balance(self) -> uint64:
        """Return the amount of change expected back from pending transactions."""
        ...

    async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128:
        """Return the maximum amount sendable in a single transaction."""
        ...

    # not all wallet supports this. To signal support, make
    # require_derivation_paths() return true
    def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32:
        """Return the puzzle hash this wallet derives from ``pubkey``."""
        ...

    def require_derivation_paths(self) -> bool:
        """Return True when this wallet derives puzzle hashes from keys."""
        ...

    # WalletStateManager is only imported for type hinting thus leaving pylint
    # unable to process this
    wallet_state_manager: WalletStateManager  # pylint: disable=used-before-assignment
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/coin_selection.py | flax/wallet/coin_selection.py | from __future__ import annotations
import logging
import random
from typing import Dict, List, Optional, Set
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint64, uint128
from flax.wallet.wallet_coin_record import WalletCoinRecord
async def select_coins(
    spendable_amount: uint128,
    max_coin_amount: uint64,
    spendable_coins: List[WalletCoinRecord],
    unconfirmed_removals: Dict[bytes32, Coin],
    log: logging.Logger,
    amount: uint128,
    exclude: Optional[List[Coin]] = None,
    min_coin_amount: Optional[uint64] = None,
) -> Set[Coin]:
    """
    Returns a set of coins that can be used for generating a new transaction.

    Selection strategy (tried in order):
      1. a single coin matching ``amount`` exactly,
      2. all coins smaller than ``amount`` if they sum to it exactly,
      3. the smallest single coin >= ``amount`` when smaller coins don't suffice,
      4. a knapsack search over the smaller coins, falling back to a greedy
         largest-first sum, and finally to the smallest sufficient single coin.

    Raises ValueError when the request exceeds the spendable balance or would
    require too many coins.
    """
    if exclude is None:
        exclude = []
    if min_coin_amount is None:
        min_coin_amount = uint64(0)

    if amount > spendable_amount:
        error_msg = (
            f"Can't select amount higher than our spendable balance. Amount: {amount}, spendable: {spendable_amount}"
        )
        log.warning(error_msg)
        raise ValueError(error_msg)

    log.debug(f"About to select coins for amount {amount}")
    # Hard cap on coins per transaction (keeps spend bundles a reasonable size).
    max_num_coins = 500
    sum_spendable_coins = 0
    valid_spendable_coins: List[Coin] = []

    for coin_record in spendable_coins:  # remove all the unconfirmed coins, excluded coins and dust.
        if coin_record.coin.name() in unconfirmed_removals:
            continue
        if coin_record.coin in exclude:
            continue
        if coin_record.coin.amount < min_coin_amount or coin_record.coin.amount > max_coin_amount:
            continue
        valid_spendable_coins.append(coin_record.coin)
        sum_spendable_coins += coin_record.coin.amount

    # This happens when we couldn't use one of the coins because it's already used
    # but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
    if sum_spendable_coins < amount:
        raise ValueError(
            f"Transaction for {amount} is greater than spendable balance of {sum_spendable_coins}. "
            "There may be other transactions pending or our minimum coin amount is too high."
        )
    if amount == 0 and sum_spendable_coins == 0:
        raise ValueError(
            "No coins available to spend, you can not create a coin with an amount of 0,"
            " without already having coins."
        )

    # Sort the coins by amount (descending — the helpers below rely on this order).
    valid_spendable_coins.sort(reverse=True, key=lambda r: r.amount)

    # check for exact 1 to 1 coin match.
    exact_match_coin: Optional[Coin] = check_for_exact_match(valid_spendable_coins, uint64(amount))
    if exact_match_coin:
        log.debug(f"selected coin with an exact match: {exact_match_coin}")
        return {exact_match_coin}

    # Check for an exact match with all of the coins smaller than the amount.
    # If we have more, smaller coins than the amount we run the next algorithm.
    smaller_coin_sum = 0  # coins smaller than target.
    smaller_coins: List[Coin] = []
    for coin in valid_spendable_coins:
        if coin.amount < amount:
            smaller_coin_sum += coin.amount
            smaller_coins.append(coin)
    if smaller_coin_sum == amount and len(smaller_coins) < max_num_coins and amount != 0:
        log.debug(f"Selected all smaller coins because they equate to an exact match of the target.: {smaller_coins}")
        return set(smaller_coins)
    elif smaller_coin_sum < amount:
        smallest_coin: Optional[Coin] = select_smallest_coin_over_target(amount, valid_spendable_coins)
        assert smallest_coin is not None  # Since we know we have enough, there must be a larger coin
        log.debug(f"Selected closest greater coin: {smallest_coin.name()}")
        return {smallest_coin}
    elif smaller_coin_sum > amount:
        coin_set: Optional[Set[Coin]] = knapsack_coin_algorithm(smaller_coins, amount, max_coin_amount, max_num_coins)
        log.debug(f"Selected coins from knapsack algorithm: {coin_set}")
        if coin_set is None:
            # Knapsack found nothing; greedily sum the largest smaller coins.
            coin_set = sum_largest_coins(amount, smaller_coins)
            if coin_set is None or len(coin_set) > max_num_coins:
                # Last resort: a single coin covering the whole amount.
                greater_coin = select_smallest_coin_over_target(amount, valid_spendable_coins)
                if greater_coin is None:
                    raise ValueError(
                        f"Transaction of {amount} mojo would use more than "
                        f"{max_num_coins} coins. Try sending a smaller amount"
                    )
                coin_set = {greater_coin}
        return coin_set
    else:
        # if smaller_coin_sum == amount and (len(smaller_coins) >= max_num_coins or amount == 0)
        potential_large_coin: Optional[Coin] = select_smallest_coin_over_target(amount, valid_spendable_coins)
        if potential_large_coin is None:
            raise ValueError("Too many coins are required to make this transaction")
        log.debug(f"Resorted to selecting smallest coin over target due to dust.: {potential_large_coin}")
        return {potential_large_coin}
# These algorithms were based off of the algorithms in:
# https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf
# we use this to check if one of the coins exactly matches the target.
def check_for_exact_match(coin_list: List[Coin], target: uint64) -> Optional[Coin]:
for coin in coin_list:
if coin.amount == target:
return coin
return None
# amount of coins smaller than target, followed by a list of all valid spendable coins.
# Coins must be sorted in descending amount order.
def select_smallest_coin_over_target(target: uint128, sorted_coin_list: List[Coin]) -> Optional[Coin]:
if sorted_coin_list[0].amount < target:
return None
for coin in reversed(sorted_coin_list):
if coin.amount >= target:
return coin
assert False # Should never reach here
# we use this to find the set of coins which have total value closest to the target, but at least the target.
# IMPORTANT: The coins have to be sorted in descending order or else this function will not work.
def knapsack_coin_algorithm(
    smaller_coins: List[Coin], target: uint128, max_coin_amount: int, max_num_coins: int, seed: bytes = b"knapsack seed"
) -> Optional[Set[Coin]]:
    """
    Randomized knapsack: run 1000 seeded trials to find a subset of
    ``smaller_coins`` summing as close to (but at least) ``target`` as
    possible.  Returns the best over-target set found, or None when no trial
    reached the target.  ``smaller_coins`` must be sorted in DESCENDING order.
    The fixed ``seed`` makes the search deterministic for a given input.
    """
    # Best (smallest) over-target sum seen so far; start at the maximum allowed.
    best_set_sum = max_coin_amount
    best_set_of_coins: Optional[Set[Coin]] = None
    ran: random.Random = random.Random()
    ran.seed(seed)
    for i in range(1000):
        # reset these variables every loop.
        selected_coins: Set[Coin] = set()
        selected_coins_sum = 0
        n_pass = 0
        target_reached = False
        while n_pass < 2 and not target_reached:
            for coin in smaller_coins:
                # run 2 passes where the first pass may select a coin 50% of the time.
                # the second pass runs to finish the set if the first pass didn't finish the set.
                # this makes each trial random and increases the chance of getting a perfect set.
                if (n_pass == 0 and bool(ran.getrandbits(1))) or (n_pass == 1 and coin not in selected_coins):
                    if len(selected_coins) > max_num_coins:
                        break
                    selected_coins_sum += coin.amount
                    selected_coins.add(coin)
                    if selected_coins_sum == target:
                        # Perfect match — return immediately.
                        return selected_coins
                    if selected_coins_sum > target:
                        target_reached = True
                        if selected_coins_sum < best_set_sum:
                            # New best over-target set; record it, then backtrack
                            # this coin to keep probing for something tighter.
                            best_set_of_coins = selected_coins.copy()
                            best_set_sum = selected_coins_sum
                            selected_coins_sum -= coin.amount
                            selected_coins.remove(coin)
            n_pass += 1
    return best_set_of_coins
# Adds up the largest coins in the list, resulting in the minimum number of selected coins. A solution
# is guaranteed if and only if the sum(coins) >= target. Coins must be sorted in descending amount order.
def sum_largest_coins(target: uint128, sorted_coins: List[Coin]) -> Optional[Set[Coin]]:
total_value = 0
selected_coins: Set[Coin] = set()
for coin in sorted_coins:
total_value += coin.amount
selected_coins.add(coin)
if total_value >= target:
return selected_coins
return None
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/key_val_store.py | flax/wallet/key_val_store.py | from __future__ import annotations
from typing import Any
from flax.util.db_wrapper import DBWrapper2
class KeyValStore:
    """
    Multipurpose persistent key-value store
    """

    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls, db_wrapper: DBWrapper2):
        """Construct the store, creating the backing table if it does not exist."""
        self = cls()
        self.db_wrapper = db_wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute("CREATE TABLE IF NOT EXISTS key_val_store(" " key text PRIMARY KEY," " value blob)")
            await conn.execute("CREATE INDEX IF NOT EXISTS key_val_name on key_val_store(key)")
        return self

    async def get_object(self, key: str, object_type: Any) -> Any:
        """
        Return the object stored under ``key``, deserialized via
        ``object_type.from_bytes``; None when the key is absent.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            cursor = await conn.execute("SELECT * from key_val_store WHERE key=?", (key,))
            row = await cursor.fetchone()
            await cursor.close()
        if row is None:
            return None
        # Column 1 holds the serialized blob.
        return object_type.from_bytes(row[1])

    async def set_object(self, key: str, obj: Any):
        """
        Adds object to key val store. Obj MUST support __bytes__ and bytes() methods.
        Overwrites any existing value for the same key.
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT OR REPLACE INTO key_val_store VALUES(?, ?)",
                (key, bytes(obj)),
            )
            await cursor.close()

    async def remove_object(self, key: str):
        """Delete the entry stored under ``key`` (no-op when absent)."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute("DELETE FROM key_val_store where key=?", (key,))
            await cursor.close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/sign_coin_spends.py | flax/wallet/sign_coin_spends.py | import inspect
from typing import List, Any
import blspy
from blspy import AugSchemeMPL
from flax.types.coin_spend import CoinSpend
from flax.types.spend_bundle import SpendBundle
from flax.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
async def sign_coin_spends(
    coin_spends: List[CoinSpend],
    secret_key_for_public_key_f: Any,  # Potentially awaitable function from G1Element => Optional[PrivateKey]
    additional_data: bytes,
    max_cost: int,
) -> SpendBundle:
    """
    Sign_coin_spends runs the puzzle code with the given argument and searches the
    result for an AGG_SIG_ME condition, which it attempts to sign by requesting a
    matching PrivateKey corresponding with the given G1Element (public key) specified
    in the resulting condition output.

    It's important to note that as mentioned in the documentation about the standard
    spend that the public key presented to the secret_key_for_public_key_f function
    provided to sign_coin_spends must be prepared to do the key derivations required
    by the coin types it's allowed to spend (at least the derivation of the standard
    spend as done by calculate_synthetic_secret_key with DEFAULT_PUZZLE_HASH).

    If a coin performed a different key derivation, the pk presented to this function
    would be similarly alien, and would need to be tried against the first stage
    derived keys (those returned by master_sk_to_wallet_sk from the ['sk'] member of
    wallet rpc's get_private_key method).

    Raises ValueError when a spend's conditions fail to parse or when no secret
    key is available for a required public key.
    """
    signatures: List[blspy.G2Element] = []
    pk_list: List[blspy.G1Element] = []
    msg_list: List[bytes] = []
    for coin_spend in coin_spends:
        # Get AGG_SIG conditions
        err, conditions_dict, cost = conditions_dict_for_solution(
            coin_spend.puzzle_reveal, coin_spend.solution, max_cost
        )
        if err or conditions_dict is None:
            error_msg = f"Sign transaction failed, con:{conditions_dict}, error: {err}"
            raise ValueError(error_msg)

        # Create signature
        for pk_bytes, msg in pkm_pairs_for_conditions_dict(conditions_dict, coin_spend.coin.name(), additional_data):
            pk = blspy.G1Element.from_bytes(pk_bytes)
            pk_list.append(pk)
            msg_list.append(msg)
            # The key lookup callback may be sync or async; support both.
            if inspect.iscoroutinefunction(secret_key_for_public_key_f):
                secret_key = await secret_key_for_public_key_f(pk)
            else:
                secret_key = secret_key_for_public_key_f(pk)
            if secret_key is None:
                e_msg = f"no secret key for {pk}"
                raise ValueError(e_msg)
            # Sanity check: the returned secret key must correspond to pk.
            assert bytes(secret_key.get_g1()) == bytes(pk)
            signature = AugSchemeMPL.sign(secret_key, msg)
            assert AugSchemeMPL.verify(pk, msg, signature)
            signatures.append(signature)

    # Aggregate signatures
    aggsig = AugSchemeMPL.aggregate(signatures)
    assert AugSchemeMPL.aggregate_verify(pk_list, msg_list, aggsig)
    return SpendBundle(coin_spends, aggsig)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/__init__.py | flax/wallet/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_puzzle_store.py | flax/wallet/wallet_puzzle_store.py | from __future__ import annotations
import asyncio
import logging
from typing import Dict, List, Optional, Set, Tuple
from blspy import G1Element
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.db_wrapper import DBWrapper2, execute_fetchone
from flax.util.ints import uint32
from flax.util.lru_cache import LRUCache
from flax.wallet.derivation_record import DerivationRecord
from flax.wallet.util.wallet_types import WalletType
log = logging.getLogger(__name__)
class WalletPuzzleStore:
"""
WalletPuzzleStore keeps track of all generated puzzle_hashes and their derivation path / wallet.
This is only used for HD wallets where each address is derived from a public key. Otherwise, use the
WalletInterestedStore to keep track of puzzle hashes which we are interested in.
"""
lock: asyncio.Lock
db_wrapper: DBWrapper2
wallet_info_for_ph_cache: LRUCache
# maps wallet_id -> last_derivation_index
last_wallet_derivation_index: Dict[uint32, uint32]
last_derivation_index: Optional[uint32]
@classmethod
async def create(cls, db_wrapper: DBWrapper2):
self = cls()
self.db_wrapper = db_wrapper
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute(
(
"CREATE TABLE IF NOT EXISTS derivation_paths("
"derivation_index int,"
" pubkey text,"
" puzzle_hash text,"
" wallet_type int,"
" wallet_id int,"
" used tinyint,"
" hardened tinyint,"
" PRIMARY KEY(puzzle_hash, wallet_id))"
)
)
await conn.execute(
"CREATE INDEX IF NOT EXISTS derivation_index_index on derivation_paths(derivation_index)"
)
await conn.execute("CREATE INDEX IF NOT EXISTS ph on derivation_paths(puzzle_hash)")
await conn.execute("CREATE INDEX IF NOT EXISTS pubkey on derivation_paths(pubkey)")
await conn.execute("CREATE INDEX IF NOT EXISTS wallet_type on derivation_paths(wallet_type)")
await conn.execute("CREATE INDEX IF NOT EXISTS derivation_paths_wallet_id on derivation_paths(wallet_id)")
await conn.execute("CREATE INDEX IF NOT EXISTS used on derivation_paths(wallet_type)")
# the lock is locked by the users of this class
self.lock = asyncio.Lock()
self.wallet_info_for_ph_cache = LRUCache(100)
self.last_derivation_index = None
self.last_wallet_derivation_index = {}
return self
    async def add_derivation_paths(self, records: List[DerivationRecord]) -> None:
        """
        Insert many derivation paths into the database.

        Also refreshes the in-memory high-water marks (last_derivation_index and
        last_wallet_derivation_index) before touching the DB, so subsequent
        reads can be served from memory.
        """
        if len(records) == 0:
            return
        sql_records = []
        for record in records:
            log.debug("Adding derivation record: %s", record)
            if record.hardened:
                hardened = 1
            else:
                hardened = 0
            # Tuple order matches the derivation_paths column order; `used`
            # always starts at 0 for a fresh record.
            sql_records.append(
                (
                    record.index,
                    bytes(record.pubkey).hex(),
                    record.puzzle_hash.hex(),
                    record.wallet_type,
                    record.wallet_id,
                    0,
                    hardened,
                ),
            )
            # Track the global maximum derivation index seen so far.
            self.last_derivation_index = (
                record.index if self.last_derivation_index is None else max(self.last_derivation_index, record.index)
            )
            # Track the per-wallet maximum derivation index as well.
            if record.wallet_id not in self.last_wallet_derivation_index:
                self.last_wallet_derivation_index[record.wallet_id] = record.index
            else:
                self.last_wallet_derivation_index[record.wallet_id] = max(
                    self.last_wallet_derivation_index[record.wallet_id], record.index
                )

        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await (
                await conn.executemany(
                    "INSERT OR REPLACE INTO derivation_paths VALUES(?, ?, ?, ?, ?, ?, ?)",
                    sql_records,
                )
            ).close()
async def get_derivation_record(
self, index: uint32, wallet_id: uint32, hardened: bool
) -> Optional[DerivationRecord]:
"""
Returns the derivation record by index and wallet id.
"""
if hardened:
hard = 1
else:
hard = 0
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn,
"SELECT derivation_index, pubkey, puzzle_hash, wallet_type, wallet_id, used FROM derivation_paths "
"WHERE derivation_index=? AND wallet_id=? AND hardened=?",
(index, wallet_id, hard),
)
if row is not None and row[0] is not None:
return self.row_to_record(row)
return None
    async def get_derivation_record_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[DerivationRecord]:
        """
        Returns the derivation record for the given puzzle hash, or None when
        the puzzle hash is unknown.
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            row = await execute_fetchone(
                conn,
                "SELECT derivation_index, pubkey, puzzle_hash, wallet_type, wallet_id, hardened FROM derivation_paths "
                "WHERE puzzle_hash=?",
                (puzzle_hash.hex(),),
            )

        if row is not None and row[0] is not None:
            return self.row_to_record(row)
        return None
async def set_used_up_to(self, index: uint32) -> None:
"""
Sets a derivation path to used so we don't use it again.
"""
async with self.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute_insert(
"UPDATE derivation_paths SET used=1 WHERE derivation_index<=?",
(index,),
)
async def puzzle_hash_exists(self, puzzle_hash: bytes32) -> bool:
"""
Checks if passed puzzle_hash is present in the db.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn, "SELECT puzzle_hash FROM derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
return row is not None
    def row_to_record(self, row) -> DerivationRecord:
        """
        Convert a derivation_paths row into a DerivationRecord.

        Expected column order: (derivation_index, pubkey, puzzle_hash,
        wallet_type, wallet_id, hardened) — note that the constructor takes
        puzzle_hash before pubkey, which is why row[2] precedes row[1] below.
        """
        return DerivationRecord(
            uint32(row[0]),
            bytes32.fromhex(row[2]),  # puzzle_hash (column 2)
            G1Element.from_bytes(bytes.fromhex(row[1])),  # pubkey (column 1)
            WalletType(row[3]),
            uint32(row[4]),
            bool(row[5]),  # hardened flag — callers must select `hardened` sixth
        )
async def index_for_pubkey(self, pubkey: G1Element) -> Optional[uint32]:
"""
Returns derivation paths for the given pubkey.
Returns None if not present.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn, "SELECT derivation_index FROM derivation_paths WHERE pubkey=?", (bytes(pubkey).hex(),)
)
if row is not None:
return uint32(row[0])
return None
async def record_for_pubkey(self, pubkey: G1Element) -> Optional[DerivationRecord]:
"""
Returns derivation record for the given pubkey.
Returns None if not present.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn,
"SELECT derivation_index, pubkey, puzzle_hash, wallet_type, wallet_id, hardened "
"FROM derivation_paths "
"WHERE pubkey=?",
(bytes(pubkey).hex(),),
)
return None if row is None else self.row_to_record(row)
async def index_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[uint32]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn, "SELECT derivation_index FROM derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
return None if row is None else uint32(row[0])
async def record_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[DerivationRecord]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn,
"SELECT derivation_index, pubkey, puzzle_hash, wallet_type, wallet_id, hardened "
"FROM derivation_paths "
"WHERE puzzle_hash=?",
(puzzle_hash.hex(),),
)
if row is not None and row[0] is not None:
return self.row_to_record(row)
return None
async def index_for_puzzle_hash_and_wallet(self, puzzle_hash: bytes32, wallet_id: uint32) -> Optional[uint32]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn,
"SELECT derivation_index FROM derivation_paths WHERE puzzle_hash=? AND wallet_id=?;",
(
puzzle_hash.hex(),
wallet_id,
),
)
if row is not None:
return uint32(row[0])
return None
async def wallet_info_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[Tuple[int, WalletType]]:
"""
Returns the derivation path for the puzzle_hash.
Returns None if not present.
"""
cached = self.wallet_info_for_ph_cache.get(puzzle_hash)
if cached is not None:
return cached
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn, "SELECT wallet_type, wallet_id FROM derivation_paths WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
if row is not None:
self.wallet_info_for_ph_cache.put(puzzle_hash, (row[1], WalletType(row[0])))
return row[1], WalletType(row[0])
return None
async def get_all_puzzle_hashes(self) -> Set[bytes32]:
"""
Return a set containing all puzzle_hashes we generated.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
rows = await conn.execute_fetchall("SELECT puzzle_hash FROM derivation_paths")
return set(bytes32.fromhex(row[0]) for row in rows)
async def get_last_derivation_path(self) -> Optional[uint32]:
"""
Returns the last derivation path by derivation_index.
"""
if self.last_derivation_index is not None:
return self.last_derivation_index
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(conn, "SELECT MAX(derivation_index) FROM derivation_paths")
last_derivation_index = None if row is None or row[0] is None else uint32(row[0])
self.last_derivation_index = last_derivation_index
return self.last_derivation_index
async def get_last_derivation_path_for_wallet(self, wallet_id: int) -> Optional[uint32]:
"""
Returns the last derivation path by derivation_index.
"""
cached_derivation_index: Optional[uint32] = self.last_wallet_derivation_index.get(uint32(wallet_id))
if cached_derivation_index is not None:
return cached_derivation_index
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn, "SELECT MAX(derivation_index) FROM derivation_paths WHERE wallet_id=?", (wallet_id,)
)
derivation_index = None if row is None or row[0] is None else uint32(row[0])
if derivation_index is not None:
self.last_wallet_derivation_index[uint32(wallet_id)] = derivation_index
return derivation_index
async def get_current_derivation_record_for_wallet(self, wallet_id: uint32) -> Optional[DerivationRecord]:
"""
Returns the current derivation record by derivation_index.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn,
"SELECT MAX(derivation_index) FROM derivation_paths WHERE wallet_id=? AND used=1 AND hardened=0",
(wallet_id,),
)
if row is not None and row[0] is not None:
index = uint32(row[0])
return await self.get_derivation_record(index, wallet_id, False)
return None
async def get_unused_derivation_path(self) -> Optional[uint32]:
"""
Returns the first unused derivation path by derivation_index.
"""
async with self.db_wrapper.reader_no_transaction() as conn:
row = await execute_fetchone(
conn, "SELECT MIN(derivation_index) FROM derivation_paths WHERE used=0 AND hardened=0;"
)
if row is not None and row[0] is not None:
return uint32(row[0])
return None
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/driver_protocol.py | flax/wallet/driver_protocol.py | from __future__ import annotations
from typing import Optional
from typing_extensions import Protocol
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.wallet.puzzle_drivers import PuzzleInfo, Solver
from flax.wallet.uncurried_puzzle import UncurriedPuzzle
class DriverProtocol(Protocol):
    """
    Structural (duck-typed) interface implemented by every outer-puzzle driver.

    A driver recognizes one puzzle/asset type, extracts inner puzzles and
    solutions from full reveals, and reconstructs full puzzles and solutions.
    """

    def match(self, puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]:
        """Return a PuzzleInfo describing ``puzzle`` if this driver recognizes it, else None."""
        ...

    def get_inner_puzzle(self, constructor: PuzzleInfo, puzzle_reveal: UncurriedPuzzle) -> Optional[Program]:
        """Extract the inner puzzle from ``puzzle_reveal``, or None if it cannot be derived."""
        ...

    def get_inner_solution(self, constructor: PuzzleInfo, solution: Program) -> Optional[Program]:
        """Extract the inner solution from an outer ``solution``, or None if unavailable."""
        ...

    def asset_id(self, constructor: PuzzleInfo) -> Optional[bytes32]:
        """Return the asset id encoded in ``constructor``, or None if this type has none."""
        ...

    def construct(self, constructor: PuzzleInfo, inner_puzzle: Program) -> Program:
        """Build the full outer puzzle described by ``constructor`` around ``inner_puzzle``."""
        ...

    def solve(self, constructor: PuzzleInfo, solver: Solver, inner_puzzle: Program, inner_solution: Program) -> Program:
        """Build the full outer solution from the inner puzzle and inner solution."""
        ...
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_state_manager.py | flax/wallet/wallet_state_manager.py | import aiosqlite
import asyncio
import json
import logging
import multiprocessing.context
import time
from collections import defaultdict
from pathlib import Path
from secrets import token_bytes
from typing import Any, Callable, Dict, Iterator, List, Optional, Set, Tuple
from blspy import G1Element, PrivateKey
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.consensus.coinbase import farmer_parent_id, pool_parent_id
from flax.consensus.constants import ConsensusConstants
from flax.data_layer.data_layer_wallet import DataLayerWallet
from flax.data_layer.dl_wallet_store import DataLayerStore
from flax.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH, solution_to_pool_state
from flax.pools.pool_wallet import PoolWallet
from flax.protocols import wallet_protocol
from flax.protocols.wallet_protocol import CoinState
from flax.server.outbound_message import NodeType
from flax.server.server import FlaxServer
from flax.server.ws_connection import WSFlaxConnection
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_spend import CoinSpend
from flax.types.full_block import FullBlock
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.util.bech32m import encode_puzzle_hash
from flax.util.db_synchronous import db_synchronous_on
from flax.util.db_wrapper import DBWrapper2
from flax.util.errors import Err
from flax.util.ints import uint8, uint32, uint64, uint128
from flax.util.lru_cache import LRUCache
from flax.util.path import path_from_root
from flax.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from flax.wallet.cat_wallet.cat_utils import construct_cat_puzzle, match_cat_puzzle
from flax.wallet.cat_wallet.cat_wallet import CATWallet
from flax.wallet.db_wallet.db_wallet_puzzles import MIRROR_PUZZLE_HASH
from flax.wallet.derivation_record import DerivationRecord
from flax.wallet.derive_keys import (
master_sk_to_wallet_sk,
master_sk_to_wallet_sk_unhardened,
master_sk_to_wallet_sk_intermediate,
_derive_path,
master_sk_to_wallet_sk_unhardened_intermediate,
_derive_path_unhardened,
)
from flax.wallet.wallet_protocol import WalletProtocol
from flax.wallet.did_wallet.did_wallet import DIDWallet
from flax.wallet.did_wallet.did_wallet_puzzles import DID_INNERPUZ_MOD, create_fullpuz, match_did_puzzle
from flax.wallet.key_val_store import KeyValStore
from flax.wallet.nft_wallet.nft_info import NFTWalletInfo
from flax.wallet.nft_wallet.nft_puzzles import get_metadata_and_phs, get_new_owner_did
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.nft_wallet.uncurry_nft import UncurriedNFT
from flax.wallet.notification_manager import NotificationManager
from flax.wallet.outer_puzzles import AssetType
from flax.wallet.puzzle_drivers import PuzzleInfo
from flax.wallet.puzzles.cat_loader import CAT_MOD, CAT_MOD_HASH
from flax.wallet.settings.user_settings import UserSettings
from flax.wallet.trade_manager import TradeManager
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.address_type import AddressType
from flax.wallet.util.compute_hints import compute_coin_hints
from flax.wallet.util.transaction_type import TransactionType
from flax.wallet.util.wallet_sync_utils import last_change_height_cs, PeerRequestException
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet import Wallet
from flax.wallet.wallet_blockchain import WalletBlockchain
from flax.wallet.wallet_coin_record import WalletCoinRecord
from flax.wallet.wallet_coin_store import WalletCoinStore
from flax.wallet.wallet_info import WalletInfo
from flax.wallet.wallet_interested_store import WalletInterestedStore
from flax.wallet.wallet_nft_store import WalletNftStore
from flax.wallet.wallet_pool_store import WalletPoolStore
from flax.wallet.wallet_puzzle_store import WalletPuzzleStore
from flax.wallet.wallet_retry_store import WalletRetryStore
from flax.wallet.wallet_transaction_store import WalletTransactionStore
from flax.wallet.wallet_user_store import WalletUserStore
from flax.wallet.uncurried_puzzle import uncurry_puzzle
class WalletStateManager:
constants: ConsensusConstants
config: Dict
tx_store: WalletTransactionStore
puzzle_store: WalletPuzzleStore
user_store: WalletUserStore
nft_store: WalletNftStore
basic_store: KeyValStore
start_index: int
# Makes sure only one asyncio thread is changing the blockchain state at one time
lock: asyncio.Lock
log: logging.Logger
# TODO Don't allow user to send tx until wallet is synced
sync_mode: bool
sync_target: uint32
genesis: FullBlock
state_changed_callback: Optional[Callable]
pending_tx_callback: Optional[Callable]
puzzle_hash_created_callbacks: Dict = defaultdict(lambda *x: None)
db_path: Path
db_wrapper: DBWrapper2
main_wallet: Wallet
wallets: Dict[uint32, WalletProtocol]
private_key: PrivateKey
trade_manager: TradeManager
notification_manager: NotificationManager
new_wallet: bool
user_settings: UserSettings
blockchain: WalletBlockchain
coin_store: WalletCoinStore
interested_store: WalletInterestedStore
retry_store: WalletRetryStore
multiprocessing_context: multiprocessing.context.BaseContext
server: FlaxServer
root_path: Path
wallet_node: Any
pool_store: WalletPoolStore
dl_store: DataLayerStore
default_cats: Dict[str, Any]
asset_to_wallet_map: Dict[AssetType, Any]
initial_num_public_keys: int
    @staticmethod
    async def create(
        private_key: PrivateKey,
        config: Dict,
        db_path: Path,
        constants: ConsensusConstants,
        server: FlaxServer,
        root_path: Path,
        wallet_node,
        name: Optional[str] = None,
    ) -> "WalletStateManager":
        """
        Build a fully initialized WalletStateManager.

        Opens the wallet database, constructs every per-table store, loads the
        main (standard) wallet, then re-instantiates every other wallet recorded
        in the user store.

        Args:
            private_key: root key for this keychain fingerprint.
            config: wallet section of the node config.
            db_path: path of the wallet SQLite database.
            constants: consensus constants for the selected network.
            server: the running FlaxServer, used for peer connections.
            root_path: the Flax root directory (for log paths etc.).
            wallet_node: owning wallet node instance.
            name: optional logger name; defaults to this module's logger.
        """
        self = WalletStateManager()
        self.new_wallet = False
        self.config = config
        self.constants = constants
        self.server = server
        self.root_path = root_path
        self.log = logging.getLogger(name if name else __name__)
        self.lock = asyncio.Lock()
        self.log.debug(f"Starting in db path: {db_path}")
        # Optional SQL command logging, enabled via config.
        sql_log_path: Optional[Path] = None
        if self.config.get("log_sqlite_cmds", False):
            sql_log_path = path_from_root(self.root_path, "log/wallet_sql.log")
            self.log.info(f"logging SQL commands to {sql_log_path}")
        self.db_wrapper = await DBWrapper2.create(
            database=db_path,
            reader_count=self.config.get("db_readers", 4),
            log_path=sql_log_path,
            # Never use pragma synchronous=OFF in Flax.
            synchronous="FULL",
        )
        self.initial_num_public_keys = config["initial_num_public_keys"]
        min_num_public_keys = 425
        # Outside of tests, enforce a floor on derived keys so restored wallets find all coins.
        if not config.get("testing", False) and self.initial_num_public_keys < min_num_public_keys:
            self.initial_num_public_keys = min_num_public_keys
        self.coin_store = await WalletCoinStore.create(self.db_wrapper)
        self.tx_store = await WalletTransactionStore.create(self.db_wrapper)
        self.puzzle_store = await WalletPuzzleStore.create(self.db_wrapper)
        self.user_store = await WalletUserStore.create(self.db_wrapper)
        self.nft_store = await WalletNftStore.create(self.db_wrapper)
        self.basic_store = await KeyValStore.create(self.db_wrapper)
        self.trade_manager = await TradeManager.create(self, self.db_wrapper)
        self.notification_manager = await NotificationManager.create(self, self.db_wrapper)
        self.user_settings = await UserSettings.create(self.basic_store)
        self.pool_store = await WalletPoolStore.create(self.db_wrapper)
        self.dl_store = await DataLayerStore.create(self.db_wrapper)
        self.interested_store = await WalletInterestedStore.create(self.db_wrapper)
        self.retry_store = await WalletRetryStore.create(self.db_wrapper)
        self.default_cats = DEFAULT_CATS
        self.wallet_node = wallet_node
        self.sync_mode = False
        self.sync_target = uint32(0)
        self.blockchain = await WalletBlockchain.create(self.basic_store, self.constants)
        self.state_changed_callback = None
        self.pending_tx_callback = None
        self.db_path = db_path
        # Wallet id 1 is always the main standard wallet; it must exist.
        main_wallet_info = await self.user_store.get_wallet_by_id(1)
        assert main_wallet_info is not None
        self.private_key = private_key
        self.main_wallet = await Wallet.create(self, main_wallet_info)
        self.wallets = {main_wallet_info.id: self.main_wallet}
        self.asset_to_wallet_map = {
            AssetType.CAT: CATWallet,
        }
        # NOTE(review): `wallet` is not reset between iterations, so a wallet_info of an
        # unrecognized type would re-register the previous wallet under its id — confirm.
        wallet = None
        for wallet_info in await self.get_all_wallet_info_entries():
            if wallet_info.type == WalletType.STANDARD_WALLET:
                if wallet_info.id == 1:
                    continue
                wallet = await Wallet.create(self, wallet_info)
            elif wallet_info.type == WalletType.CAT:
                wallet = await CATWallet.create(
                    self,
                    self.main_wallet,
                    wallet_info,
                )
            elif wallet_info.type == WalletType.DECENTRALIZED_ID:
                wallet = await DIDWallet.create(
                    self,
                    self.main_wallet,
                    wallet_info,
                )
            elif wallet_info.type == WalletType.NFT:
                wallet = await NFTWallet.create(
                    self,
                    self.main_wallet,
                    wallet_info,
                )
            elif wallet_info.type == WalletType.POOLING_WALLET:
                wallet = await PoolWallet.create_from_db(
                    self,
                    self.main_wallet,
                    wallet_info,
                )
            elif wallet_info.type == WalletType.DATA_LAYER:
                wallet = await DataLayerWallet.create(
                    self,
                    self.main_wallet,
                    wallet_info,
                )
            if wallet is not None:
                self.wallets[wallet_info.id] = wallet
        return self
    def get_public_key(self, index: uint32) -> G1Element:
        """Derive the hardened wallet public key at the given index from the root key."""
        return master_sk_to_wallet_sk(self.private_key, index).get_g1()
    def get_public_key_unhardened(self, index: uint32) -> G1Element:
        """Derive the unhardened wallet public key at the given index from the root key."""
        return master_sk_to_wallet_sk_unhardened(self.private_key, index).get_g1()
async def get_keys(self, puzzle_hash: bytes32) -> Optional[Tuple[G1Element, PrivateKey]]:
record = await self.puzzle_store.record_for_puzzle_hash(puzzle_hash)
if record is None:
raise ValueError(f"No key for this puzzlehash {puzzle_hash})")
if record.hardened:
private = master_sk_to_wallet_sk(self.private_key, record.index)
pubkey = private.get_g1()
return pubkey, private
private = master_sk_to_wallet_sk_unhardened(self.private_key, record.index)
pubkey = private.get_g1()
return pubkey, private
    async def create_more_puzzle_hashes(
        self,
        from_zero: bool = False,
        in_transaction: bool = False,
        mark_existing_as_used: bool = True,
        up_to_index: Optional[uint32] = None,
        num_additional_phs: Optional[int] = None,
    ):
        """
        For all wallets in the user store, generates the first few puzzle hashes so
        that we can restore the wallet from only the private keys.

        Args:
            from_zero: regenerate from index 0 (used after the key was replaced).
            in_transaction: accepted for call-site compatibility; not read in this
                method body (review: confirm before removing).
            mark_existing_as_used: mark previously generated unused indices as used
                once new paths have been created.
            up_to_index: derive at least through this index instead of starting from
                the first unused index.
            num_additional_phs: how many indices to derive beyond the starting point;
                defaults to ``self.initial_num_public_keys``.
        """
        targets = list(self.wallets.keys())
        self.log.debug("Target wallets to generate puzzle hashes for: %s", repr(targets))
        unused: Optional[uint32] = (
            uint32(up_to_index + 1) if up_to_index is not None else await self.puzzle_store.get_unused_derivation_path()
        )
        if unused is None:
            # This handles the case where the database has entries but they have all been used
            unused = await self.puzzle_store.get_last_derivation_path()
            self.log.debug("Tried finding unused: %s", unused)
            if unused is None:
                # This handles the case where the database is empty
                unused = uint32(0)
        self.log.debug(f"Requested to generate puzzle hashes to at least index {unused}")
        start_t = time.time()
        to_generate = num_additional_phs if num_additional_phs is not None else self.initial_num_public_keys
        new_paths: bool = False
        for wallet_id in targets:
            target_wallet = self.wallets[wallet_id]
            if not target_wallet.require_derivation_paths():
                self.log.debug("Skipping wallet %s as no derivation paths required", wallet_id)
                continue
            last: Optional[uint32] = await self.puzzle_store.get_last_derivation_path_for_wallet(wallet_id)
            self.log.debug(
                "Fetched last record for wallet %r: %s (from_zero=%r, unused=%r)", wallet_id, last, from_zero, unused
            )
            start_index = 0
            derivation_paths: List[DerivationRecord] = []
            if last is not None:
                start_index = last + 1
            # If the key was replaced (from_zero=True), we should generate the puzzle hashes for the new key
            if from_zero:
                start_index = 0
            last_index = unused + to_generate
            if start_index >= last_index:
                self.log.debug(f"Nothing to create for for wallet_id: {wallet_id}, index: {start_index}")
            else:
                creating_msg = (
                    f"Creating puzzle hashes from {start_index} to {last_index - 1} for wallet_id: {wallet_id}"
                )
                self.log.info(f"Start: {creating_msg}")
                # Intermediate keys are derived once; per-index derivation then only does the last step.
                intermediate_sk = master_sk_to_wallet_sk_intermediate(self.private_key)
                intermediate_sk_un = master_sk_to_wallet_sk_unhardened_intermediate(self.private_key)
                for index in range(start_index, last_index):
                    if WalletType(target_wallet.type()) == WalletType.POOLING_WALLET:
                        continue
                    # Hardened
                    pubkey: G1Element = _derive_path(intermediate_sk, [index]).get_g1()
                    puzzlehash: Optional[bytes32] = target_wallet.puzzle_hash_for_pk(pubkey)
                    if puzzlehash is None:
                        self.log.error(f"Unable to create puzzles with wallet {target_wallet}")
                        break
                    self.log.debug(f"Puzzle at index {index} wallet ID {wallet_id} puzzle hash {puzzlehash.hex()}")
                    new_paths = True
                    derivation_paths.append(
                        DerivationRecord(
                            uint32(index),
                            puzzlehash,
                            pubkey,
                            WalletType(target_wallet.type()),
                            uint32(target_wallet.id()),
                            True,
                        )
                    )
                    # Unhardened
                    pubkey_unhardened: G1Element = _derive_path_unhardened(intermediate_sk_un, [index]).get_g1()
                    puzzlehash_unhardened: Optional[bytes32] = target_wallet.puzzle_hash_for_pk(pubkey_unhardened)
                    if puzzlehash_unhardened is None:
                        self.log.error(f"Unable to create puzzles with wallet {target_wallet}")
                        break
                    self.log.debug(
                        f"Puzzle at index {index} wallet ID {wallet_id} puzzle hash {puzzlehash_unhardened.hex()}"
                    )
                    # We await sleep here to allow an asyncio context switch (since the other parts of this loop do
                    # not have await and therefore block). This can prevent networking layer from responding to ping.
                    await asyncio.sleep(0)
                    derivation_paths.append(
                        DerivationRecord(
                            uint32(index),
                            puzzlehash_unhardened,
                            pubkey_unhardened,
                            WalletType(target_wallet.type()),
                            uint32(target_wallet.id()),
                            False,
                        )
                    )
                self.log.info(f"Done: {creating_msg} Time: {time.time() - start_t} seconds")
            await self.puzzle_store.add_derivation_paths(derivation_paths)
            # Subscribe to the new puzzle hashes so the full node reports their coins.
            await self.add_interested_puzzle_hashes(
                [record.puzzle_hash for record in derivation_paths],
                [record.wallet_id for record in derivation_paths],
            )
            if len(derivation_paths) > 0:
                self.state_changed("new_derivation_index", data_object={"index": derivation_paths[-1].index})
        # By default, we'll mark previously generated unused puzzle hashes as used if we have new paths
        if mark_existing_as_used and unused > 0 and new_paths:
            self.log.info(f"Updating last used derivation index: {unused - 1}")
            await self.puzzle_store.set_used_up_to(uint32(unused - 1))
async def update_wallet_puzzle_hashes(self, wallet_id):
derivation_paths: List[DerivationRecord] = []
target_wallet = self.wallets[wallet_id]
last: Optional[uint32] = await self.puzzle_store.get_last_derivation_path_for_wallet(wallet_id)
unused: Optional[uint32] = await self.puzzle_store.get_unused_derivation_path()
if unused is None:
# This handles the case where the database has entries but they have all been used
unused = await self.puzzle_store.get_last_derivation_path()
if unused is None:
# This handles the case where the database is empty
unused = uint32(0)
if last is not None:
for index in range(unused, last):
# Since DID are not released yet we can assume they are only using unhardened keys derivation
pubkey: G1Element = self.get_public_key_unhardened(uint32(index))
puzzlehash: Optional[bytes32] = target_wallet.puzzle_hash_for_pk(pubkey)
self.log.info(f"Generating public key at index {index} puzzle hash {puzzlehash.hex()}")
derivation_paths.append(
DerivationRecord(
uint32(index),
puzzlehash,
pubkey,
target_wallet.wallet_info.type,
uint32(target_wallet.wallet_info.id),
False,
)
)
await self.puzzle_store.add_derivation_paths(derivation_paths)
    async def get_unused_derivation_record(self, wallet_id: uint32, *, hardened: bool = False) -> DerivationRecord:
        """
        Creates a puzzle hash for the given wallet, and then makes more puzzle hashes
        for every wallet to ensure we always have more in the database. Never reuse the
        same public key more than once (for privacy).

        Args:
            wallet_id: wallet to fetch the derivation record for.
            hardened: whether to return the hardened or unhardened record at that index.
        """
        # The puzzle-store lock serializes "pick the next index and mark it used",
        # preventing two callers from receiving the same derivation.
        async with self.puzzle_store.lock:
            # If we have no unused public keys, we will create new ones
            unused: Optional[uint32] = await self.puzzle_store.get_unused_derivation_path()
            if unused is None:
                self.log.debug("No unused paths, generate more ")
                await self.create_more_puzzle_hashes()
                # Now we must have unused public keys
                unused = await self.puzzle_store.get_unused_derivation_path()
                assert unused is not None
            self.log.debug("Fetching derivation record for: %s %s %s", unused, wallet_id, hardened)
            record: Optional[DerivationRecord] = await self.puzzle_store.get_derivation_record(
                unused, wallet_id, hardened
            )
            assert record is not None
            # Set this key to used so we never use it again
            await self.puzzle_store.set_used_up_to(record.index)
            # Create more puzzle hashes / keys
            await self.create_more_puzzle_hashes()
            return record
async def get_current_derivation_record_for_wallet(self, wallet_id: uint32) -> Optional[DerivationRecord]:
async with self.puzzle_store.lock:
# If we have no unused public keys, we will create new ones
current: Optional[DerivationRecord] = await self.puzzle_store.get_current_derivation_record_for_wallet(
wallet_id
)
return current
    def set_callback(self, callback: Callable):
        """
        Register the callback invoked whenever the state of the wallet changes.
        """
        self.state_changed_callback = callback
    def set_pending_callback(self, callback: Callable):
        """
        Register the callback invoked when a new pending transaction enters the store.
        """
        self.pending_tx_callback = callback
    def set_coin_with_puzzlehash_created_callback(self, puzzlehash: bytes32, callback: Callable):
        """
        Register a callback invoked when a new coin with the given puzzle hash is seen.
        """
        self.puzzle_hash_created_callbacks[puzzlehash] = callback
async def puzzle_hash_created(self, coin: Coin):
callback = self.puzzle_hash_created_callbacks[coin.puzzle_hash]
if callback is None:
return None
await callback(coin)
def state_changed(self, state: str, wallet_id: Optional[int] = None, data_object: Optional[Dict[str, Any]] = None):
"""
Calls the callback if it's present.
"""
if self.state_changed_callback is None:
return None
change_data: Dict[str, Any] = {"state": state}
if wallet_id is not None:
change_data["wallet_id"] = wallet_id
if data_object is not None:
change_data["additional_data"] = data_object
self.state_changed_callback(state, change_data)
    def tx_pending_changed(self) -> None:
        """
        Notifies the wallet node that there's new tx pending
        """
        # No-op until a callback has been registered via set_pending_callback().
        if self.pending_tx_callback is None:
            return None
        self.pending_tx_callback()
async def synced(self):
if len(self.server.get_connections(NodeType.FULL_NODE)) == 0:
return False
latest = await self.blockchain.get_peak_block()
if latest is None:
return False
if "simulator" in self.config.get("selected_network"):
return True # sim is always synced if we have a genesis block.
if latest.height - await self.blockchain.get_finished_sync_up_to() > 1:
return False
latest_timestamp = self.blockchain.get_latest_timestamp()
has_pending_queue_items = self.wallet_node.new_peak_queue.has_pending_data_process_items()
if latest_timestamp > int(time.time()) - 5 * 60 and not has_pending_queue_items:
return True
return False
    def set_sync_mode(self, mode: bool, sync_height: uint32 = uint32(0)):
        """
        Sets the sync mode. This changes the behavior of the wallet node.

        Args:
            mode: True while a long sync is in progress.
            sync_height: the height being synced to (0 when leaving sync mode).
        """
        self.sync_mode = mode
        self.sync_target = sync_height
        # Notify subscribers (e.g. the UI) that sync status changed.
        self.state_changed("sync_changed")
async def get_confirmed_spendable_balance_for_wallet(self, wallet_id: int, unspent_records=None) -> uint128:
"""
Returns the balance amount of all coins that are spendable.
"""
spendable: Set[WalletCoinRecord] = await self.get_spendable_coins_for_wallet(wallet_id, unspent_records)
spendable_amount: uint128 = uint128(0)
for record in spendable:
spendable_amount = uint128(spendable_amount + record.coin.amount)
return spendable_amount
async def does_coin_belong_to_wallet(self, coin: Coin, wallet_id: int) -> bool:
"""
Returns true if we have the key for this coin.
"""
info = await self.puzzle_store.wallet_info_for_puzzle_hash(coin.puzzle_hash)
if info is None:
return False
coin_wallet_id, wallet_type = info
if wallet_id == coin_wallet_id:
return True
return False
async def get_confirmed_balance_for_wallet(
self,
wallet_id: int,
unspent_coin_records: Optional[Set[WalletCoinRecord]] = None,
) -> uint128:
"""
Returns the confirmed balance, including coinbase rewards that are not spendable.
"""
# lock only if unspent_coin_records is None
if unspent_coin_records is None:
unspent_coin_records = await self.coin_store.get_unspent_coins_for_wallet(wallet_id)
return uint128(sum(cr.coin.amount for cr in unspent_coin_records))
    async def get_unconfirmed_balance(
        self, wallet_id: int, unspent_coin_records: Optional[Set[WalletCoinRecord]] = None
    ) -> uint128:
        """
        Returns the balance, including coinbase rewards that are not spendable, and unconfirmed
        transactions.
        """
        # This API should change so that get_balance_from_coin_records is called for Set[WalletCoinRecord]
        # and this method is called only for the unspent_coin_records==None case.
        if unspent_coin_records is None:
            unspent_coin_records = await self.coin_store.get_unspent_coins_for_wallet(wallet_id)
        unconfirmed_tx: List[TransactionRecord] = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
        all_unspent_coins: Set[Coin] = {cr.coin for cr in unspent_coin_records}
        # Overlay pending transactions on the confirmed set: add our own additions
        # (change / self-sends), remove coins we are about to spend.
        for record in unconfirmed_tx:
            for addition in record.additions:
                # This change or a self transaction
                if await self.does_coin_belong_to_wallet(addition, wallet_id):
                    all_unspent_coins.add(addition)
            for removal in record.removals:
                if await self.does_coin_belong_to_wallet(removal, wallet_id) and removal in all_unspent_coins:
                    all_unspent_coins.remove(removal)
        return uint128(sum(coin.amount for coin in all_unspent_coins))
async def unconfirmed_removals_for_wallet(self, wallet_id: int) -> Dict[bytes32, Coin]:
"""
Returns new removals transactions that have not been confirmed yet.
"""
removals: Dict[bytes32, Coin] = {}
unconfirmed_tx = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
for record in unconfirmed_tx:
for coin in record.removals:
removals[coin.name()] = coin
return removals
    async def determine_coin_type(
        self, peer: WSFlaxConnection, coin_state: CoinState, fork_height: Optional[uint32]
    ) -> Tuple[Optional[uint32], Optional[WalletType]]:
        """
        Inspect a newly seen coin's parent spend to classify it as CAT, NFT or DID,
        delegating to the matching handler.

        Returns (wallet_id, wallet_type) from the handler, or (None, None) when the
        coin is a farming reward, the parent can't be fetched, or no type matches.
        """
        # Farmer/pool rewards are plain coins; nothing to classify.
        if coin_state.created_height is not None and (
            self.is_pool_reward(uint32(coin_state.created_height), coin_state.coin)
            or self.is_farmer_reward(uint32(coin_state.created_height), coin_state.coin)
        ):
            return None, None
        # The asset type is determined by the puzzle that created this coin,
        # i.e. the parent coin's spend.
        response: List[CoinState] = await self.wallet_node.get_coin_state(
            [coin_state.coin.parent_coin_info], peer=peer, fork_height=fork_height
        )
        if len(response) == 0:
            self.log.warning(f"Could not find a parent coin with ID: {coin_state.coin.parent_coin_info}")
            return None, None
        parent_coin_state = response[0]
        assert parent_coin_state.spent_height == coin_state.created_height
        coin_spend: Optional[CoinSpend] = await self.wallet_node.fetch_puzzle_solution(
            parent_coin_state.spent_height, parent_coin_state.coin, peer
        )
        if coin_spend is None:
            return None, None
        puzzle = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
        uncurried = uncurry_puzzle(puzzle)
        # Check if the coin is a CAT
        cat_curried_args = match_cat_puzzle(uncurried)
        if cat_curried_args is not None:
            return await self.handle_cat(cat_curried_args, parent_coin_state, coin_state, coin_spend)
        # Check if the coin is a NFT
        # hint
        # First spend where 1 mojo coin -> Singleton launcher -> NFT -> NFT
        uncurried_nft = UncurriedNFT.uncurry(uncurried.mod, uncurried.args)
        if uncurried_nft is not None:
            return await self.handle_nft(coin_spend, uncurried_nft, parent_coin_state)
        # Check if the coin is a DID
        did_curried_args = match_did_puzzle(uncurried.mod, uncurried.args)
        if did_curried_args is not None:
            return await self.handle_did(did_curried_args, parent_coin_state, coin_state, coin_spend, peer)
        # Not a known asset type; it may still be a notification coin.
        await self.notification_manager.potentially_add_new_notification(coin_state, coin_spend)
        return None, None
    async def filter_spam(self, new_coin_state: List[CoinState]) -> List[CoinState]:
        """
        Drop tiny incoming "dust" coins aimed at the standard wallet once the number
        of small unspent coins exceeds the configured threshold. All other coin
        states pass through unchanged.
        """
        xfx_spam_amount = self.config.get("xfx_spam_amount", 1000000)
        # No need to filter anything if the filter is set to 1 or 0 mojos
        if xfx_spam_amount <= 1:
            return new_coin_state
        spam_filter_after_n_txs = self.config.get("spam_filter_after_n_txs", 200)
        # Running count of existing small unspent coins; filtering kicks in
        # once it reaches spam_filter_after_n_txs.
        small_unspent_count = await self.coin_store.count_small_unspent(xfx_spam_amount)
        filtered_cs: List[CoinState] = []
        # Memoize "is this puzzle hash ours (standard wallet)" per batch to avoid
        # repeated store lookups for the same puzzle hash.
        is_standard_wallet_phs: Set[bytes32] = set()
        for cs in new_coin_state:
            # Only apply filter to new coins being sent to our wallet, that are very small
            if (
                cs.created_height is not None
                and cs.spent_height is None
                and cs.coin.amount < xfx_spam_amount
                and (cs.coin.puzzle_hash in is_standard_wallet_phs or await self.is_standard_wallet_tx(cs))
            ):
                is_standard_wallet_phs.add(cs.coin.puzzle_hash)
                if small_unspent_count < spam_filter_after_n_txs:
                    filtered_cs.append(cs)
                small_unspent_count += 1
            else:
                filtered_cs.append(cs)
        return filtered_cs
async def is_standard_wallet_tx(self, coin_state: CoinState) -> bool:
wallet_info: Optional[Tuple[uint32, WalletType]] = await self.get_wallet_id_for_puzzle_hash(
coin_state.coin.puzzle_hash
)
if wallet_info is not None and wallet_info[1] == WalletType.STANDARD_WALLET:
return True
return False
async def handle_cat(
self,
curried_args: Iterator[Program],
parent_coin_state: CoinState,
coin_state: CoinState,
coin_spend: CoinSpend,
) -> Tuple[Optional[uint32], Optional[WalletType]]:
"""
Handle the new coin when it is a CAT
:param curried_args: Curried arg of the CAT mod
:param parent_coin_state: Parent coin state
:param coin_state: Current coin state
:param coin_spend: New coin spend
:return: Wallet ID & Wallet Type
"""
wallet_id = None
wallet_type = None
mod_hash, tail_hash, inner_puzzle = curried_args
hint_list = compute_coin_hints(coin_spend)
derivation_record = None
for hint in hint_list:
derivation_record = await self.puzzle_store.get_derivation_record_for_puzzle_hash(bytes32(hint))
if derivation_record is not None:
break
if derivation_record is None:
self.log.info(f"Received state for the coin that doesn't belong to us {coin_state}")
else:
our_inner_puzzle: Program = self.main_wallet.puzzle_for_pk(derivation_record.pubkey)
asset_id: bytes32 = bytes32(bytes(tail_hash)[1:])
cat_puzzle = construct_cat_puzzle(CAT_MOD, asset_id, our_inner_puzzle, CAT_MOD_HASH)
if cat_puzzle.get_tree_hash() != coin_state.coin.puzzle_hash:
return None, None
if bytes(tail_hash).hex()[2:] in self.default_cats or self.config.get(
"automatically_add_unknown_cats", False
):
cat_wallet = await CATWallet.create_wallet_for_cat(self, self.main_wallet, bytes(tail_hash).hex()[2:])
wallet_id = cat_wallet.id()
wallet_type = WalletType(cat_wallet.type())
self.state_changed("wallet_created")
else:
# Found unacknowledged CAT, save it in the database.
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/notification_store.py | flax/wallet/notification_store.py | from __future__ import annotations
import dataclasses
import logging
from typing import List, Optional, Tuple
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.db_wrapper import DBWrapper2
from flax.util.ints import uint32, uint64
@dataclasses.dataclass(frozen=True)
class Notification:
    """A single wallet notification, keyed by the coin that delivered it."""

    # ID of the coin that carried this notification (primary key in the notifications table).
    coin_id: bytes32
    # Raw notification payload bytes.
    message: bytes
    # Value attached to the notification coin.
    amount: uint64
class NotificationStore:
    """
    Stores on-chain wallet notifications (coin id, message, amount) in the wallet DB.
    """

    # Kept for API compatibility; no in-memory cache is maintained here.
    cache_size: uint32
    db_wrapper: DBWrapper2
    log: logging.Logger

    @classmethod
    async def create(
        cls, db_wrapper: DBWrapper2, cache_size: uint32 = uint32(600000), name: Optional[str] = None
    ) -> "NotificationStore":
        """
        Create a NotificationStore and ensure the backing table and index exist.

        :param db_wrapper: Wrapper around the wallet DB connections
        :param cache_size: Retained for API compatibility; unused for caching
        :param name: Optional logger name; defaults to this module's logger
        :return: The initialized store
        """
        self = cls()
        if name:
            self.log = logging.getLogger(name)
        else:
            self.log = logging.getLogger(__name__)
        self.cache_size = cache_size
        self.db_wrapper = db_wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                "CREATE TABLE IF NOT EXISTS notifications(" "coin_id blob PRIMARY KEY," "msg blob," "amount blob" ")"
            )
            # Redundant with the implicit PRIMARY KEY index, but kept for schema compatibility
            # with databases created by earlier versions.
            await conn.execute("CREATE INDEX IF NOT EXISTS coin_id_index on notifications(coin_id)")
        return self

    @staticmethod
    def _in_placeholders(coin_ids: List[bytes32]) -> str:
        """Build a parenthesized placeholder list "(?,?,...)" for a parameterized IN clause."""
        # An empty input yields "()"; SQLite accepts an empty IN list (it matches nothing).
        return "(" + ",".join("?" * len(coin_ids)) + ")"

    async def add_notification(self, notification: Notification) -> None:
        """
        Insert (or replace) a Notification, keyed by its coin id.
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT OR REPLACE INTO notifications " "(coin_id, msg, amount) " "VALUES(?, ?, ?)",
                (
                    notification.coin_id,
                    notification.message,
                    bytes(notification.amount),
                ),
            )
            await cursor.close()

    async def get_notifications(self, coin_ids: List[bytes32]) -> List[Notification]:
        """
        Return the notifications for the given coin ids, ordered by amount descending.
        """
        coin_ids_str_list = self._in_placeholders(coin_ids)
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(
                f"SELECT * from notifications WHERE coin_id IN {coin_ids_str_list} ORDER BY amount DESC", coin_ids
            )

        return [
            Notification(
                bytes32(row[0]),
                bytes(row[1]),
                uint64.from_bytes(row[2]),
            )
            for row in rows
        ]

    async def get_all_notifications(
        self, pagination: Optional[Tuple[Optional[int], Optional[int]]] = None
    ) -> List[Notification]:
        """
        Return all notifications, ordered by amount descending.

        :param pagination: Optional (start, end) half-open index range of the ordered results;
            either bound may be None to leave that side unbounded.
        """
        if pagination is not None:
            if pagination[1] is not None and pagination[0] is not None:
                # LIMIT offset, count — rows with index in [start, end)
                pagination_str = f" LIMIT {pagination[0]}, {pagination[1] - pagination[0]}"
            elif pagination[1] is None and pagination[0] is not None:
                # Everything from `start` onward
                pagination_str = f" LIMIT {pagination[0]}, (SELECT COUNT(*) from notifications)"
            elif pagination[1] is not None and pagination[0] is None:
                # First `end` rows
                pagination_str = f" LIMIT {pagination[1]}"
            else:
                pagination_str = ""
        else:
            pagination_str = ""
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall(f"SELECT * from notifications ORDER BY amount DESC{pagination_str}")

        return [
            Notification(
                bytes32(row[0]),
                bytes(row[1]),
                uint64.from_bytes(row[2]),
            )
            for row in rows
        ]

    async def delete_notifications(self, coin_ids: List[bytes32]) -> None:
        """Delete the notifications for the given coin ids."""
        coin_ids_str_list = self._in_placeholders(coin_ids)
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            # Delete from storage
            cursor = await conn.execute(f"DELETE FROM notifications WHERE coin_id IN {coin_ids_str_list}", coin_ids)
            await cursor.close()

    async def delete_all_notifications(self) -> None:
        """Delete every stored notification."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            # Delete from storage
            cursor = await conn.execute("DELETE FROM notifications")
            await cursor.close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/trade_record.py | flax/wallet/trade_record.py | from dataclasses import dataclass
from typing import List, Optional, Tuple, Dict, Any
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint8, uint32, uint64
from flax.util.streamable import Streamable, streamable
from flax.wallet.trading.offer import Offer
from flax.wallet.trading.trade_status import TradeStatus
@streamable
@dataclass(frozen=True)
class TradeRecord(Streamable):
    """
    Used for storing trade (offer) data and status in wallets.

    NOTE(review): presumably @streamable serializes fields in declaration order — do not
    reorder fields without confirming the serialization impact.
    """

    # Block height at which the trade confirmed.
    confirmed_at_index: uint32
    # Timestamp when the offer was accepted, if it has been.
    accepted_at_time: Optional[uint64]
    # Timestamp when this record was created.
    created_at_time: uint64
    # True if we created the offer; False if we took someone else's.
    is_my_offer: bool
    # presumably a send-attempt counter — confirm against the trade manager.
    sent: uint32
    # Serialized Offer bytes of the original offer.
    offer: bytes
    # Serialized Offer bytes of the taken (counterparty) offer, when applicable.
    taken_offer: Optional[bytes]
    # Coins involved in this trade that the wallet tracks.
    coins_of_interest: List[Coin]
    # Unique identifier of the trade.
    trade_id: bytes32
    status: uint32  # TradeStatus, enum not streamable
    sent_to: List[Tuple[str, uint8, Optional[str]]]  # MempoolSubmissionStatus.status enum not streamable

    def to_json_dict_convenience(self) -> Dict[str, Any]:
        """Return a JSON dict with a readable status name, an offer summary, and pending amounts."""
        formatted = self.to_json_dict()
        # Replace the raw enum value with its readable name.
        formatted["status"] = TradeStatus(self.status).name
        # Summarize the taken offer when present, otherwise our own offer.
        offer_to_summarize: bytes = self.offer if self.taken_offer is None else self.taken_offer
        offer = Offer.from_bytes(offer_to_summarize)
        offered, requested, infos = offer.summary()
        formatted["summary"] = {
            "offered": offered,
            "requested": requested,
            "infos": infos,
            "fees": offer.bundle.fees(),
        }
        formatted["pending"] = offer.get_pending_amounts()
        # Raw serialized offer bytes are omitted from the convenience output.
        del formatted["offer"]
        return formatted

    @classmethod
    def from_json_dict_convenience(cls, record: Dict[str, Any], offer: str = "") -> "TradeRecord":
        """
        Inverse of to_json_dict_convenience: restore the enum value, drop the derived
        "summary"/"pending" keys, and re-attach the serialized offer.
        """
        new_record = record.copy()
        new_record["status"] = TradeStatus[record["status"]].value
        del new_record["summary"]
        del new_record["pending"]
        new_record["offer"] = offer
        return cls.from_json_dict(new_record)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_coin_record.py | flax/wallet/wallet_coin_record.py | from __future__ import annotations
from dataclasses import dataclass
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint32
from flax.wallet.util.wallet_types import WalletType
@dataclass(frozen=True)
class WalletCoinRecord:
    """
    These are values that correspond to a CoinName that are used
    in keeping track of the unspent database.
    """

    # The coin itself (parent id, puzzle hash, amount).
    coin: Coin
    # Height of the block in which this coin was confirmed.
    confirmed_block_height: uint32
    # Height of the block in which this coin was spent; presumably 0 while unspent — confirm.
    spent_block_height: uint32
    # Whether the coin has been spent.
    spent: bool
    # True if this coin is a farming (coinbase) reward.
    coinbase: bool
    # Type of the wallet that owns this coin.
    wallet_type: WalletType
    # ID of the wallet that owns this coin.
    wallet_id: int

    def name(self) -> bytes32:
        # Unique on-chain ID of the coin, as computed by Coin.name().
        return self.coin.name()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_interested_store.py | flax/wallet/wallet_interested_store.py | from typing import List, Tuple, Optional
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.db_wrapper import DBWrapper2
from flax.util.ints import uint32
class WalletInterestedStore:
    """
    Stores coin ids and puzzle hashes that we are interested in receiving state updates for,
    plus CATs we have received but not yet acknowledged.
    """

    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls, wrapper: DBWrapper2):
        """Create the store and ensure all backing tables exist."""
        self = cls()
        self.db_wrapper = wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute("CREATE TABLE IF NOT EXISTS interested_coins(coin_name text PRIMARY KEY)")
            await conn.execute(
                "CREATE TABLE IF NOT EXISTS interested_puzzle_hashes(puzzle_hash text PRIMARY KEY, wallet_id integer)"
            )
            # Table for unknown CATs
            fields = "asset_id text PRIMARY KEY, name text, first_seen_height integer, sender_puzzle_hash text"
            await conn.execute(f"CREATE TABLE IF NOT EXISTS unacknowledged_asset_tokens({fields})")
        return self

    async def get_interested_coin_ids(self) -> List[bytes32]:
        """Return all coin ids we are watching."""
        # NOTE(review): this read-only query uses a writer connection, unlike the other getters;
        # presumably so a read inside an enclosing writer transaction sees uncommitted inserts —
        # confirm before switching to reader_no_transaction.
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute("SELECT coin_name FROM interested_coins")
            rows_hex = await cursor.fetchall()
        return [bytes32(bytes.fromhex(row[0])) for row in rows_hex]

    async def add_interested_coin_id(self, coin_id: bytes32) -> None:
        """Start watching a coin id (idempotent)."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute("INSERT OR REPLACE INTO interested_coins VALUES (?)", (coin_id.hex(),))
            await cursor.close()

    async def get_interested_puzzle_hashes(self) -> List[Tuple[bytes32, int]]:
        """Return all (puzzle hash, wallet id) pairs we are watching."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            cursor = await conn.execute("SELECT puzzle_hash, wallet_id FROM interested_puzzle_hashes")
            rows_hex = await cursor.fetchall()
        return [(bytes32(bytes.fromhex(row[0])), row[1]) for row in rows_hex]

    async def get_interested_puzzle_hash_wallet_id(self, puzzle_hash: bytes32) -> Optional[int]:
        """Return the wallet id watching this puzzle hash, or None if it is not watched."""
        async with self.db_wrapper.reader_no_transaction() as conn:
            cursor = await conn.execute(
                "SELECT wallet_id FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),)
            )
            row = await cursor.fetchone()
        if row is None:
            return None
        return row[0]

    async def add_interested_puzzle_hash(self, puzzle_hash: bytes32, wallet_id: int) -> None:
        """Start watching a puzzle hash on behalf of a wallet (replaces any previous owner)."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT OR REPLACE INTO interested_puzzle_hashes VALUES (?, ?)", (puzzle_hash.hex(), wallet_id)
            )
            await cursor.close()

    async def remove_interested_puzzle_hash(self, puzzle_hash: bytes32) -> None:
        """Stop watching a puzzle hash."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "DELETE FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),)
            )
            await cursor.close()

    async def add_unacknowledged_token(
        self,
        asset_id: bytes32,
        name: str,
        first_seen_height: Optional[uint32],
        sender_puzzle_hash: bytes32,
    ) -> None:
        """
        Add an unacknowledged CAT to the database. It will only be inserted once at the first time.
        :param asset_id: CAT asset ID
        :param name: Name of the CAT, for now it will be unknown until we integrate the CAT name service
        :param first_seen_height: The block height of the wallet received this CAT in the first time
        :param sender_puzzle_hash: The puzzle hash of the sender
        :return: None
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT OR IGNORE INTO unacknowledged_asset_tokens VALUES (?, ?, ?, ?)",
                (
                    asset_id.hex(),
                    name,
                    # A missing first-seen height is stored as 0.
                    first_seen_height if first_seen_height is not None else 0,
                    sender_puzzle_hash.hex(),
                ),
            )
            await cursor.close()

    async def get_unacknowledged_tokens(self) -> List:
        """
        Get a list of all unacknowledged CATs
        :return: A json style list of unacknowledged CATs
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            cursor = await conn.execute(
                "SELECT asset_id, name, first_seen_height, sender_puzzle_hash FROM unacknowledged_asset_tokens"
            )
            cats = await cursor.fetchall()
        return [
            {"asset_id": cat[0], "name": cat[1], "first_seen_height": cat[2], "sender_puzzle_hash": cat[3]}
            for cat in cats
        ]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/transaction_record.py | flax/wallet/transaction_record.py | from dataclasses import dataclass
from typing import Generic, List, Optional, Tuple, TypeVar, Dict
from flax.consensus.coinbase import pool_parent_id, farmer_parent_id
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.types.spend_bundle import SpendBundle
from flax.util.bech32m import encode_puzzle_hash, decode_puzzle_hash
from flax.util.ints import uint8, uint32, uint64
from flax.util.streamable import Streamable, streamable
from flax.wallet.util.transaction_type import TransactionType
T = TypeVar("T")
@dataclass
class ItemAndTransactionRecords(Generic[T]):
    """Pairs an arbitrary item with the transaction records associated with it."""

    # The wrapped item of generic type T.
    item: T
    # Transaction records associated with the item.
    transaction_records: List["TransactionRecord"]
@streamable
@dataclass(frozen=True)
class TransactionRecord(Streamable):
    """
    Used for storing transaction data and status in wallets.

    NOTE(review): presumably @streamable serializes fields in declaration order — do not
    reorder fields without confirming the serialization impact.
    """

    # Block height at which the transaction confirmed (meaningful when `confirmed` is True).
    confirmed_at_height: uint32
    # Timestamp when this record was created.
    created_at_time: uint64
    # Destination puzzle hash of the send.
    to_puzzle_hash: bytes32
    # Amount sent.
    amount: uint64
    # Fee paid for the transaction.
    fee_amount: uint64
    # Whether the transaction is confirmed on chain.
    confirmed: bool
    # presumably a send-attempt counter — confirm against the wallet's tx resubmission logic.
    sent: uint32
    # The spend bundle, if we still hold it (may be None once confirmed).
    spend_bundle: Optional[SpendBundle]
    # Coins created by this transaction.
    additions: List[Coin]
    # Coins consumed by this transaction.
    removals: List[Coin]
    # ID of the wallet this transaction belongs to.
    wallet_id: uint32

    # Represents the list of peers that we sent the transaction to, whether each one
    # included it in the mempool, and what the error message (if any) was
    sent_to: List[Tuple[str, uint8, Optional[str]]]
    # Trade this transaction belongs to, if any.
    trade_id: Optional[bytes32]
    type: uint32  # TransactionType

    # name is also called bundle_id and tx_id
    name: bytes32
    # Per-coin memos attached to the transaction.
    memos: List[Tuple[bytes32, List[bytes]]]

    def is_in_mempool(self) -> bool:
        """Return True if at least one peer accepted the transaction into its mempool."""
        # If one of the nodes we sent it to responded with success, we set it to success
        for (_, mis, _) in self.sent_to:
            if MempoolInclusionStatus(mis) == MempoolInclusionStatus.SUCCESS:
                return True
        # Note, transactions pending inclusion (pending) return false
        return False

    def height_farmed(self, genesis_challenge: bytes32) -> Optional[uint32]:
        """
        Return the block height at which this reward was farmed, or None when the
        transaction is unconfirmed or not a pool/farmer reward.
        """
        if not self.confirmed:
            return None
        if self.type == TransactionType.FEE_REWARD or self.type == TransactionType.COINBASE_REWARD:
            # Reward coins have deterministic parent ids per height, so scan back up to 100
            # blocks for a height whose pool/farmer parent matches this coin's parent.
            for block_index in range(self.confirmed_at_height, self.confirmed_at_height - 100, -1):
                if block_index < 0:
                    return None
                pool_parent = pool_parent_id(uint32(block_index), genesis_challenge)
                farmer_parent = farmer_parent_id(uint32(block_index), genesis_challenge)
                if pool_parent == self.additions[0].parent_coin_info:
                    return uint32(block_index)
                if farmer_parent == self.additions[0].parent_coin_info:
                    return uint32(block_index)
        return None

    def get_memos(self) -> Dict[bytes32, List[bytes]]:
        """Return the memos as a dict keyed by coin id."""
        return {coin_id: ms for coin_id, ms in self.memos}

    @classmethod
    def from_json_dict_convenience(cls, modified_tx_input: Dict):
        """Parse a convenience-format JSON dict (as produced by to_json_dict_convenience)."""
        modified_tx = modified_tx_input.copy()
        if "to_address" in modified_tx:
            modified_tx["to_puzzle_hash"] = decode_puzzle_hash(modified_tx["to_address"]).hex()
        if "to_address" in modified_tx:
            del modified_tx["to_address"]
        # Converts memos from a flat dict into a nested list
        memos_dict: Dict[str, List[str]] = {}
        memos_list: List = []
        if "memos" in modified_tx:
            for coin_id, memo in modified_tx["memos"].items():
                if coin_id not in memos_dict:
                    memos_dict[coin_id] = []
                memos_dict[coin_id].append(memo)
            for coin_id, memos in memos_dict.items():
                memos_list.append((coin_id, memos))
            modified_tx["memos"] = memos_list
        return cls.from_json_dict(modified_tx)

    def to_json_dict_convenience(self, config: Dict) -> Dict:
        """Return a JSON dict with a bech32 to_address and a flat coin_id -> memo hex map."""
        selected = config["selected_network"]
        prefix = config["network_overrides"]["config"][selected]["address_prefix"]
        formatted = self.to_json_dict()
        formatted["to_address"] = encode_puzzle_hash(self.to_puzzle_hash, prefix)
        # NOTE(review): this flat map keeps only one memo per coin id — when a coin has
        # several memos, later ones overwrite earlier ones.
        formatted["memos"] = {
            coin_id.hex(): memo.hex()
            for coin_id, memos in self.get_memos().items()
            for memo in memos
            if memo is not None
        }
        return formatted
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_retry_store.py | flax/wallet/wallet_retry_store.py | from __future__ import annotations
from typing import List, Optional, Tuple
from chia_rs import CoinState
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.db_wrapper import DBWrapper2
from flax.util.ints import uint32
class WalletRetryStore:
    """
    Persistent coin states that we have received but failed to add
    """

    db_wrapper: DBWrapper2

    @classmethod
    async def create(cls, db_wrapper: DBWrapper2) -> "WalletRetryStore":
        """Create the store and ensure the retry_store table exists."""
        self = cls()
        self.db_wrapper = db_wrapper
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                "CREATE TABLE IF NOT EXISTS retry_store(" " coin_state blob," " peer blob," " fork_height int)"
            )
        return self

    async def get_all_states_to_retry(self) -> List[Tuple[CoinState, bytes32, uint32]]:
        """
        Return all states that were failed to sync
        """
        async with self.db_wrapper.reader_no_transaction() as conn:
            rows = await conn.execute_fetchall("SELECT * from retry_store")
        return [(CoinState.from_bytes(row[0]), bytes32(row[1]), uint32(row[2])) for row in rows]

    async def add_state(self, state: CoinState, peer_id: bytes32, fork_height: Optional[uint32]) -> None:
        """
        Queue a coin state for a later retry, recording the peer it came from and the
        fork height (a missing fork height is stored as 0). Duplicate inserts are ignored.
        """
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute(
                "INSERT OR IGNORE INTO retry_store VALUES(?, ?, ?)",
                (bytes(state), peer_id, 0 if fork_height is None else fork_height),
            )
            await cursor.close()

    async def remove_state(self, state: CoinState) -> None:
        """Remove a coin state from the retry queue (e.g. after it was added successfully)."""
        async with self.db_wrapper.writer_maybe_transaction() as conn:
            cursor = await conn.execute("DELETE FROM retry_store where coin_state=?", (bytes(state),))
            await cursor.close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_blockchain.py | flax/wallet/wallet_blockchain.py | import logging
from typing import Dict, Optional, Tuple, List
from flax.consensus.block_header_validation import validate_finished_header_block
from flax.consensus.block_record import BlockRecord
from flax.consensus.blockchain import ReceiveBlockResult
from flax.consensus.blockchain_interface import BlockchainInterface
from flax.consensus.constants import ConsensusConstants
from flax.consensus.find_fork_point import find_fork_point_in_chain
from flax.consensus.full_block_to_block_record import block_to_block_record
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.header_block import HeaderBlock
from flax.types.weight_proof import WeightProof
from flax.util.errors import Err
from flax.util.ints import uint32, uint64
from flax.wallet.key_val_store import KeyValStore
from flax.wallet.wallet_weight_proof_handler import WalletWeightProofHandler
log = logging.getLogger(__name__)
class WalletBlockchain(BlockchainInterface):
    """
    Light blockchain view for the wallet: keeps a bounded in-memory cache of recent
    block records (seeded by a validated weight proof) rather than the full chain.
    """

    constants: ConsensusConstants
    # Persistent key/value store for the peak block, synced weight proof, and sync height.
    _basic_store: KeyValStore
    _weight_proof_handler: WalletWeightProofHandler

    # The last fully validated weight proof, if any.
    synced_weight_proof: Optional[WeightProof]
    _finished_sync_up_to: uint32

    # Current (heaviest known) peak header block.
    _peak: Optional[HeaderBlock]
    # Main-chain height -> header hash for cached heights.
    _height_to_hash: Dict[uint32, bytes32]
    # Header hash -> block record cache (bounded by CACHE_SIZE).
    _block_records: Dict[bytes32, BlockRecord]
    # Timestamp of the latest transaction block seen on the main chain.
    _latest_timestamp: uint64
    _sub_slot_iters: uint64
    _difficulty: uint64
    CACHE_SIZE: int

    @staticmethod
    async def create(_basic_store: KeyValStore, constants: ConsensusConstants):
        """
        Initializes a blockchain with the BlockRecords from disk, assuming they have all been
        validated. Uses the genesis block given in override_constants, or as a fallback,
        in the consensus constants config.
        """
        self = WalletBlockchain()
        self._basic_store = _basic_store
        self.constants = constants
        # Keep roughly three sub-epochs worth of block records in memory.
        self.CACHE_SIZE = constants.SUB_EPOCH_BLOCKS * 3
        self.synced_weight_proof = await self._basic_store.get_object("SYNCED_WEIGHT_PROOF", WeightProof)
        self._finished_sync_up_to = await self._basic_store.get_object("FINISHED_SYNC_UP_TO", uint32)
        if self._finished_sync_up_to is None:
            self._finished_sync_up_to = uint32(0)
        self._peak = None
        self._peak = await self.get_peak_block()
        self._latest_timestamp = uint64(0)
        self._height_to_hash = {}
        self._block_records = {}
        self._sub_slot_iters = constants.SUB_SLOT_ITERS_STARTING
        self._difficulty = constants.DIFFICULTY_STARTING
        return self

    async def new_valid_weight_proof(self, weight_proof: WeightProof, records: List[BlockRecord]) -> None:
        """Adopt a freshly validated, heavier weight proof and its block records as chain state."""
        peak: Optional[HeaderBlock] = await self.get_peak_block()
        if peak is not None and weight_proof.recent_chain_data[-1].weight <= peak.weight:
            # No update, don't change anything
            return None
        self.synced_weight_proof = weight_proof
        await self._basic_store.set_object("SYNCED_WEIGHT_PROOF", weight_proof)
        latest_timestamp = self._latest_timestamp
        for record in records:
            self._height_to_hash[record.height] = record.header_hash
            self.add_block_record(record)
            if record.is_transaction_block:
                assert record.timestamp is not None
                if record.timestamp > latest_timestamp:
                    latest_timestamp = record.timestamp

        self._sub_slot_iters = records[-1].sub_slot_iters
        # Current difficulty is the weight delta between the last two records.
        self._difficulty = uint64(records[-1].weight - records[-2].weight)
        await self.set_peak_block(weight_proof.recent_chain_data[-1], latest_timestamp)
        await self.clean_block_records()

    async def receive_block(self, block: HeaderBlock) -> Tuple[ReceiveBlockResult, Optional[Err]]:
        """Validate a header block and add it to the chain, possibly advancing the peak."""
        if self.contains_block(block.header_hash):
            return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None
        if not self.contains_block(block.prev_header_hash) and block.height > 0:
            return ReceiveBlockResult.DISCONNECTED_BLOCK, None
        # A new sub slot may carry updated sub-slot iters / difficulty; otherwise reuse current.
        if (
            len(block.finished_sub_slots) > 0
            and block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None
        ):
            assert block.finished_sub_slots[0].challenge_chain.new_difficulty is not None  # They both change together
            sub_slot_iters: uint64 = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters
            difficulty: uint64 = block.finished_sub_slots[0].challenge_chain.new_difficulty
        else:
            sub_slot_iters = self._sub_slot_iters
            difficulty = self._difficulty

        # Validation requires a block cache (self) that goes back to a subepoch barrier
        required_iters, error = validate_finished_header_block(
            self.constants, self, block, False, difficulty, sub_slot_iters, False
        )
        if error is not None:
            return ReceiveBlockResult.INVALID_BLOCK, error.code
        if required_iters is None:
            return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_POSPACE

        # We are passing in sub_slot_iters here so we don't need to backtrack until the start of the epoch to find
        # the sub slot iters and difficulty. This allows us to keep the cache small.
        block_record: BlockRecord = block_to_block_record(
            self.constants, self, required_iters, None, block, sub_slot_iters
        )
        self.add_block_record(block_record)

        if self._peak is None:
            # First block we see becomes the peak.
            if block_record.is_transaction_block:
                latest_timestamp = block_record.timestamp
            else:
                latest_timestamp = None
            self._height_to_hash[block_record.height] = block_record.header_hash
            await self.set_peak_block(block, latest_timestamp)
            return ReceiveBlockResult.NEW_PEAK, None
        elif block_record.weight > self._peak.weight:
            if block_record.prev_hash == self._peak.header_hash:
                # Extends the current peak: the fork point is the old peak itself.
                fork_height: int = self._peak.height
            else:
                # Reorg: walk back to the common ancestor of the new block and the old peak.
                fork_height = find_fork_point_in_chain(self, block_record, self._peak)
            await self._rollback_to_height(fork_height)
            # Re-register the new chain segment above the fork and track the newest timestamp.
            curr_record: BlockRecord = block_record
            latest_timestamp = self._latest_timestamp
            while curr_record.height > fork_height:
                self._height_to_hash[curr_record.height] = curr_record.header_hash
                if curr_record.timestamp is not None and curr_record.timestamp > latest_timestamp:
                    latest_timestamp = curr_record.timestamp
                if curr_record.height == 0:
                    break
                curr_record = self.block_record(curr_record.prev_hash)
            self._sub_slot_iters = block_record.sub_slot_iters
            self._difficulty = uint64(block_record.weight - self.block_record(block_record.prev_hash).weight)
            await self.set_peak_block(block, latest_timestamp)
            await self.clean_block_records()
            return ReceiveBlockResult.NEW_PEAK, None
        return ReceiveBlockResult.ADDED_AS_ORPHAN, None

    async def _rollback_to_height(self, height: int):
        """Drop height-to-hash mappings above `height` and remove the persisted peak."""
        if self._peak is None:
            return
        for h in range(max(0, height + 1), self._peak.height + 1):
            del self._height_to_hash[uint32(h)]

        await self._basic_store.remove_object("PEAK_BLOCK")

    async def set_peak_block(self, block: HeaderBlock, timestamp: Optional[uint64] = None):
        """Persist and cache the new peak; update the latest timestamp if one is available."""
        await self._basic_store.set_object("PEAK_BLOCK", block)
        self._peak = block
        if timestamp is not None:
            self._latest_timestamp = timestamp
        elif block.foliage_transaction_block is not None:
            # Transaction blocks carry their own timestamp.
            self._latest_timestamp = block.foliage_transaction_block.timestamp
        log.info(f"Peak set to: {self._peak.height} timestamp: {self._latest_timestamp}")

    async def get_peak_block(self) -> Optional[HeaderBlock]:
        """Return the cached peak, falling back to the persisted one."""
        if self._peak is not None:
            return self._peak
        return await self._basic_store.get_object("PEAK_BLOCK", HeaderBlock)

    async def set_finished_sync_up_to(self, height: int, *, in_rollback: bool = False):
        """
        Persist the height we have fully synced to. Only moves forward, unless
        `in_rollback` is set, in which case any non-negative height is accepted.
        """
        if (in_rollback and height >= 0) or (height > await self.get_finished_sync_up_to()):
            await self._basic_store.set_object("FINISHED_SYNC_UP_TO", uint32(height))
            await self.clean_block_records()

    async def get_finished_sync_up_to(self) -> uint32:
        """Return the persisted fully-synced height (0 if never set)."""
        h: Optional[uint32] = await self._basic_store.get_object("FINISHED_SYNC_UP_TO", uint32)
        if h is None:
            return uint32(0)
        return h

    def get_latest_timestamp(self) -> uint64:
        # Timestamp of the latest transaction block seen.
        return self._latest_timestamp

    def contains_block(self, header_hash: bytes32) -> bool:
        # True if the block record is in the in-memory cache.
        return header_hash in self._block_records

    def contains_height(self, height: uint32) -> bool:
        # True if we know the main-chain hash at this height.
        return height in self._height_to_hash

    def height_to_hash(self, height: uint32) -> bytes32:
        # Raises KeyError for heights outside the cached range.
        return self._height_to_hash[height]

    def try_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
        """Return the cached block record, or None if not cached."""
        if self.contains_block(header_hash):
            return self.block_record(header_hash)
        return None

    def block_record(self, header_hash: bytes32) -> BlockRecord:
        # Raises KeyError if the record is not cached.
        return self._block_records[header_hash]

    def add_block_record(self, block_record: BlockRecord):
        # Cache a block record (trimmed later by clean_block_records).
        self._block_records[block_record.header_hash] = block_record

    async def clean_block_records(self):
        """
        Cleans the cache so that we only maintain relevant blocks. This removes
        block records that have height < peak - CACHE_SIZE.
        """
        height_limit = max(0, (await self.get_finished_sync_up_to()) - self.CACHE_SIZE)
        if len(self._block_records) < self.CACHE_SIZE:
            return None

        to_remove: List[bytes32] = []
        for header_hash, block_record in self._block_records.items():
            if block_record.height < height_limit:
                to_remove.append(header_hash)

        for header_hash in to_remove:
            del self._block_records[header_hash]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/wallet_weight_proof_handler.py | flax/wallet/wallet_weight_proof_handler.py | import asyncio
import logging
import tempfile
from concurrent.futures.process import ProcessPoolExecutor
from multiprocessing.context import BaseContext
from typing import IO, List, Tuple, Optional
from flax.consensus.block_record import BlockRecord
from flax.consensus.constants import ConsensusConstants
from flax.full_node.weight_proof import (
_validate_sub_epoch_summaries,
validate_weight_proof_inner,
)
from flax.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from flax.types.weight_proof import (
WeightProof,
)
from flax.util.ints import uint32
from flax.util.setproctitle import getproctitle, setproctitle
log = logging.getLogger(__name__)
def _create_shutdown_file() -> IO:
return tempfile.NamedTemporaryFile(prefix="flax_wallet_weight_proof_handler_executor_shutdown_trigger")
class WalletWeightProofHandler:
    """Validates weight proofs for the wallet using a pool of worker processes."""

    def __init__(
        self,
        constants: ConsensusConstants,
        multiprocessing_context: BaseContext,
    ):
        self._constants = constants
        # Number of worker processes used for segment validation.
        self._num_processes = 4
        # Its path is handed to validate_weight_proof_inner; closing (and thereby deleting)
        # the file signals the workers to stop early.
        self._executor_shutdown_tempfile: IO = _create_shutdown_file()
        self._executor: ProcessPoolExecutor = ProcessPoolExecutor(
            self._num_processes,
            mp_context=multiprocessing_context,
            initializer=setproctitle,
            initargs=(f"{getproctitle()}_worker",),
        )
        # In-flight validation tasks, kept so they can be cancelled on shutdown.
        self._weight_proof_tasks: List[asyncio.Task] = []

    def cancel_weight_proof_tasks(self):
        """Cancel all in-flight validations and shut down the process pool."""
        for task in self._weight_proof_tasks:
            if not task.done():
                task.cancel()
        self._weight_proof_tasks = []
        # Closing deletes the shutdown-trigger file, then drain the executor.
        self._executor_shutdown_tempfile.close()
        self._executor.shutdown(wait=True)

    async def validate_weight_proof(
        self, weight_proof: WeightProof, skip_segment_validation: bool = False, old_proof: Optional[WeightProof] = None
    ) -> Tuple[bool, List[SubEpochSummary], List[BlockRecord]]:
        """
        Validate a weight proof.

        :param weight_proof: the proof to validate
        :param skip_segment_validation: skip the expensive segment validation when True
        :param old_proof: previously validated proof; validation resumes from the fork sub epoch
        :return: (valid, sub epoch summaries, block records); lists are empty on failure
        """
        summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self._constants, weight_proof)
        await asyncio.sleep(0)  # break up otherwise multi-second sync code
        if summaries is None or sub_epoch_weight_list is None:
            log.error("weight proof failed sub epoch data validation")
            return False, [], []
        # Only validate from the first sub epoch where the new proof diverges from the old.
        validate_from = get_fork_ses_idx(old_proof, weight_proof)
        task: asyncio.Task = asyncio.create_task(
            validate_weight_proof_inner(
                self._constants,
                self._executor,
                self._executor_shutdown_tempfile.name,
                self._num_processes,
                weight_proof,
                summaries,
                sub_epoch_weight_list,
                skip_segment_validation,
                validate_from,
            )
        )
        self._weight_proof_tasks.append(task)
        valid, block_records = await task
        self._weight_proof_tasks.remove(task)
        return valid, summaries, block_records
def get_wp_fork_point(constants: ConsensusConstants, old_wp: Optional[WeightProof], new_wp: WeightProof) -> uint32:
    """
    Find a conservative fork point between two weight proofs by walking their sub epoch
    summaries, then (when the recent chains overlap) their recent chain data. The result
    may be earlier than the actual fork point, never later.
    """
    if old_wp is None:
        return uint32(0)

    # Count the leading sub epoch summaries the two proofs agree on.
    matched = 0
    blocks_overflow = 0
    for i, ses in enumerate(new_wp.sub_epochs):
        if i == len(new_wp.sub_epochs) - 1 or i == len(old_wp.sub_epochs):
            break
        if ses.reward_chain_hash != old_wp.sub_epochs[i].reward_chain_hash:
            break
        matched = i + 1
        blocks_overflow = new_wp.sub_epochs[i + 1].num_blocks_overflow

    if new_wp.recent_chain_data[0].height < old_wp.recent_chain_data[-1].height:
        # The recent chains overlap: try to locate an exact fork point.
        new_i = 0
        old_i = 0
        while new_i < len(new_wp.recent_chain_data) and old_i < len(old_wp.recent_chain_data):
            if new_wp.recent_chain_data[new_i].header_hash == old_wp.recent_chain_data[old_i].header_hash:
                new_i += 1
            else:
                # Advance the old-chain pointer until the hashes line up again.
                old_i += 1
        if new_i != 0:
            # new_i - 1 indexes the last block both proofs share.
            return new_wp.recent_chain_data[new_i - 1].height

    # Fall back to the height of the last matching sub epoch.
    return uint32((constants.SUB_EPOCH_BLOCKS * matched) + blocks_overflow)
def get_fork_ses_idx(old_wp: Optional[WeightProof], new_wp: WeightProof) -> int:
    """
    Return the index of the first sub epoch summary where new_wp diverges from old_wp.

    This method is conservative: it does not return the actual fork point, it can return
    an index that is before the actual fork point.

    :param old_wp: previously synced weight proof, or None when syncing from scratch
    :param new_wp: newly received weight proof
    :return: sub epoch summary index to start validating from (0 when there is nothing to resume from)

    Fixes vs. previous version: an empty old_wp.sub_epochs used to raise IndexError on the
    first comparison; and the None path returned uint32(0) despite the declared `int` return —
    both paths now return a plain int 0 (backward compatible, uint32 is an int subclass).
    """
    if old_wp is None or len(old_wp.sub_epochs) == 0:
        # Nothing previously synced: validate from the very beginning.
        return 0
    ses_index = 0
    for idx, new_ses in enumerate(new_wp.sub_epochs):
        if new_ses.reward_chain_hash != old_wp.sub_epochs[idx].reward_chain_hash:
            # First divergence between the proofs.
            ses_index = idx
            break
        if idx == len(old_wp.sub_epochs) - 1:
            # Ran out of old summaries while still matching; resume at the last shared index.
            ses_index = idx
            break
    return ses_index
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/outer_puzzles.py | flax/wallet/outer_puzzles.py | from __future__ import annotations
from enum import Enum
from typing import Dict, Optional
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.wallet.cat_wallet.cat_outer_puzzle import CATOuterPuzzle
from flax.wallet.driver_protocol import DriverProtocol
from flax.wallet.nft_wallet.metadata_outer_puzzle import MetadataOuterPuzzle
from flax.wallet.nft_wallet.ownership_outer_puzzle import OwnershipOuterPuzzle
from flax.wallet.nft_wallet.singleton_outer_puzzle import SingletonOuterPuzzle
from flax.wallet.nft_wallet.transfer_program_puzzle import TransferProgramPuzzle
from flax.wallet.puzzle_drivers import PuzzleInfo, Solver
from flax.wallet.uncurried_puzzle import UncurriedPuzzle
"""
This file provides a central location for acquiring drivers for outer puzzles like CATs, NFTs, etc.
A driver for a puzzle must include the following functions:
- match(self, puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]
- Given a puzzle reveal, return a PuzzleInfo object that can be used to reconstruct it later
- get_inner_puzzle(self, constructor: PuzzleInfo, puzzle_reveal: UncurriedPuzzle) -> Optional[Program]:
- Given a PuzzleInfo object and a puzzle reveal, pull out this outer puzzle's inner puzzle
- asset_id(self, constructor: PuzzleInfo) -> Optional[bytes32]
- Given a PuzzleInfo object, generate a 32 byte ID for use in dictionaries, etc.
- construct(self, constructor: PuzzleInfo, inner_puzzle: Program) -> Program
- Given a PuzzleInfo object and an innermost puzzle, construct a puzzle reveal for a coin spend
- solve(self, constructor: PuzzleInfo, solver: Solver, inner_puzzle: Program, inner_solution: Program) -> Program
- Given a PuzzleInfo object, a Solver object, and an innermost puzzle and its solution return a solution for a spend
- The "Solver" object can contain any dictionary, it's up to the driver to enforce the needed elements of the API
- Some classes that wish to integrate with a driver may not have access to all of the info it needs so the driver
needs to raise errors appropriately
"""
class AssetType(Enum):
CAT = "CAT"
SINGLETON = "singleton"
METADATA = "metadata"
OWNERSHIP = "ownership"
ROYALTY_TRANSFER_PROGRAM = "royalty transfer program"
def match_puzzle(puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]:
for driver in driver_lookup.values():
potential_info: Optional[PuzzleInfo] = driver.match(puzzle)
if potential_info is not None:
return potential_info
return None
def construct_puzzle(constructor: PuzzleInfo, inner_puzzle: Program) -> Program:
return driver_lookup[AssetType(constructor.type())].construct(constructor, inner_puzzle)
def solve_puzzle(constructor: PuzzleInfo, solver: Solver, inner_puzzle: Program, inner_solution: Program) -> Program:
return driver_lookup[AssetType(constructor.type())].solve(constructor, solver, inner_puzzle, inner_solution)
def get_inner_puzzle(constructor: PuzzleInfo, puzzle_reveal: UncurriedPuzzle) -> Optional[Program]:
return driver_lookup[AssetType(constructor.type())].get_inner_puzzle(constructor, puzzle_reveal)
def get_inner_solution(constructor: PuzzleInfo, solution: Program) -> Optional[Program]:
return driver_lookup[AssetType(constructor.type())].get_inner_solution(constructor, solution)
def create_asset_id(constructor: PuzzleInfo) -> Optional[bytes32]:
return driver_lookup[AssetType(constructor.type())].asset_id(constructor)
function_args = (match_puzzle, construct_puzzle, solve_puzzle, get_inner_puzzle, get_inner_solution)
driver_lookup: Dict[AssetType, DriverProtocol] = {
AssetType.CAT: CATOuterPuzzle(*function_args),
AssetType.SINGLETON: SingletonOuterPuzzle(*function_args),
AssetType.METADATA: MetadataOuterPuzzle(*function_args),
AssetType.OWNERSHIP: OwnershipOuterPuzzle(*function_args),
AssetType.ROYALTY_TRANSFER_PROGRAM: TransferProgramPuzzle(*function_args),
}
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/address_type.py | flax/wallet/util/address_type.py | from __future__ import annotations
from enum import Enum
from typing import Any, Dict, Set
from flax.util.bech32m import bech32_decode, convertbits
from flax.util.config import selected_network_address_prefix
class AddressType(Enum):
XFX = "xfx"
NFT = "nft"
DID = "did:flax:"
def hrp(self, config: Dict[str, Any]) -> str:
if self == AddressType.XFX:
# Special case to map XFX to the current network's address prefix
return selected_network_address_prefix(config)
return self.value
def expected_decoded_length(self) -> int:
# Current address types encode 32 bytes. If future address types vary in
# their length, this will need to be updated.
return 32
def is_valid_address(address: str, allowed_types: Set[AddressType], config: Dict[str, Any]) -> bool:
try:
ensure_valid_address(address, allowed_types=allowed_types, config=config)
return True
except ValueError:
return False
def ensure_valid_address(address: str, *, allowed_types: Set[AddressType], config: Dict[str, Any]) -> str:
hrp, b32data = bech32_decode(address)
if not b32data or not hrp:
raise ValueError(f"Invalid address: {address}")
# Match by prefix (hrp) and return the corresponding address type
address_type = next(
(addr_type for (addr_type, addr_hrp) in ((a, a.hrp(config)) for a in allowed_types) if addr_hrp == hrp),
None,
)
if address_type is None:
raise ValueError(
f"Invalid address: {address}. "
f"Expected an address with one of the following prefixes: {[t.hrp(config) for t in allowed_types]}"
)
decoded_data = convertbits(b32data, 5, 8, False)
if len(decoded_data) != address_type.expected_decoded_length():
raise ValueError(
f"Invalid address: {address}. "
f"Expected {address_type.expected_decoded_length()} bytes, got {len(decoded_data)}"
)
return address
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/curry_and_treehash.py | flax/wallet/util/curry_and_treehash.py | from __future__ import annotations
from hashlib import sha256
from typing import Callable, List
from flax.types.blockchain_format.sized_bytes import bytes32
CurryHashFunction = Callable[..., bytes32]
NULL = bytes.fromhex("")
ONE = bytes.fromhex("01")
TWO = bytes.fromhex("02")
Q_KW = bytes.fromhex("01")
A_KW = bytes.fromhex("02")
C_KW = bytes.fromhex("04")
def shatree_atom(atom: bytes) -> bytes32:
s = sha256()
s.update(ONE)
s.update(atom)
return bytes32(s.digest())
def shatree_pair(left_hash: bytes32, right_hash: bytes32) -> bytes32:
s = sha256()
s.update(TWO)
s.update(left_hash)
s.update(right_hash)
return bytes32(s.digest())
Q_KW_TREEHASH = shatree_atom(Q_KW)
A_KW_TREEHASH = shatree_atom(A_KW)
C_KW_TREEHASH = shatree_atom(C_KW)
ONE_TREEHASH = shatree_atom(ONE)
NULL_TREEHASH = shatree_atom(NULL)
# The environment `E = (F . R)` recursively expands out to
# `(c . ((q . F) . EXPANSION(R)))` if R is not 0
# `1` if R is 0
def curried_values_tree_hash(arguments: List[bytes32]) -> bytes32:
if len(arguments) == 0:
return ONE_TREEHASH
return shatree_pair(
C_KW_TREEHASH,
shatree_pair(
shatree_pair(Q_KW_TREEHASH, arguments[0]),
shatree_pair(curried_values_tree_hash(arguments[1:]), NULL_TREEHASH),
),
)
# The curry pattern is `(a . ((q . F) . (E . 0)))` == `(a (q . F) E)
# where `F` is the `mod` and `E` is the curried environment
def curry_and_treehash(hash_of_quoted_mod_hash: bytes32, *hashed_arguments: bytes32) -> bytes32:
"""
`hash_of_quoted_mod_hash` : tree hash of `(q . MOD)` where `MOD` is template to be curried
`arguments` : tree hashes of arguments to be curried
"""
curried_values = curried_values_tree_hash(list(hashed_arguments))
return shatree_pair(
A_KW_TREEHASH,
shatree_pair(hash_of_quoted_mod_hash, shatree_pair(curried_values, NULL_TREEHASH)),
)
def calculate_hash_of_quoted_mod_hash(mod_hash: bytes32) -> bytes32:
return shatree_pair(Q_KW_TREEHASH, mod_hash)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/wallet_types.py | flax/wallet/util/wallet_types.py | from __future__ import annotations
from enum import IntEnum
from typing import List
from typing_extensions import TypedDict
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint64
class WalletType(IntEnum):
# Wallet Types
STANDARD_WALLET = 0
ATOMIC_SWAP = 2
AUTHORIZED_PAYEE = 3
MULTI_SIG = 4
CUSTODY = 5
CAT = 6
RECOVERABLE = 7
DECENTRALIZED_ID = 8
POOLING_WALLET = 9
NFT = 10
DATA_LAYER = 11
DATA_LAYER_OFFER = 12
class AmountWithPuzzlehash(TypedDict):
amount: uint64
puzzlehash: bytes32
memos: List[bytes]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/transaction_type.py | flax/wallet/util/transaction_type.py | from __future__ import annotations
from enum import IntEnum
class TransactionType(IntEnum):
INCOMING_TX = 0
OUTGOING_TX = 1
COINBASE_REWARD = 2
FEE_REWARD = 3
INCOMING_TRADE = 4
OUTGOING_TRADE = 5
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/compute_memos.py | flax/wallet/util/compute_memos.py | from typing import List, Dict
from clvm.casts import int_from_bytes
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import INFINITE_COST
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_spend import CoinSpend
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.spend_bundle import SpendBundle
def compute_memos_for_spend(coin_spend: CoinSpend) -> Dict[bytes32, List[bytes]]:
_, result = coin_spend.puzzle_reveal.run_with_cost(INFINITE_COST, coin_spend.solution)
memos: Dict[bytes32, List[bytes]] = {}
for condition in result.as_python():
if condition[0] == ConditionOpcode.CREATE_COIN and len(condition) >= 4:
# If only 3 elements (opcode + 2 args), there is no memo, this is ph, amount
coin_added = Coin(coin_spend.coin.name(), bytes32(condition[1]), int_from_bytes(condition[2]))
if type(condition[3]) != list:
# If it's not a list, it's not the correct format
continue
memos[coin_added.name()] = condition[3]
return memos
def compute_memos(bundle: SpendBundle) -> Dict[bytes32, List[bytes]]:
"""
Retrieves the memos for additions in this spend_bundle, which are formatted as a list in the 3rd parameter of
CREATE_COIN. If there are no memos, the addition coin_id is not included. If they are not formatted as a list
of bytes, they are not included. This is expensive to call, it should not be used in full node code.
"""
memos: Dict[bytes32, List[bytes]] = {}
for coin_spend in bundle.coin_spends:
spend_memos = compute_memos_for_spend(coin_spend)
for coin_name, coin_memos in spend_memos.items():
existing_memos = memos.get(coin_name)
if existing_memos is None:
memos[coin_name] = coin_memos
else:
memos[coin_name] = existing_memos + coin_memos
return memos
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/compute_hints.py | flax/wallet/util/compute_hints.py | from typing import List
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.blockchain_format.program import INFINITE_COST
from flax.types.coin_spend import CoinSpend
def compute_coin_hints(cs: CoinSpend) -> List[bytes32]:
_, result_program = cs.puzzle_reveal.run_with_cost(INFINITE_COST, cs.solution)
h_list: List[bytes32] = []
for condition_data in result_program.as_python():
condition = condition_data[0]
args = condition_data[1:]
if condition == ConditionOpcode.CREATE_COIN and len(args) > 2:
if isinstance(args[2], list):
if isinstance(args[2][0], bytes):
h_list.append(bytes32(args[2][0]))
return h_list
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/merkle_utils.py | flax/wallet/util/merkle_utils.py | from __future__ import annotations
import hashlib
from typing import Any, Dict, List, Tuple
from flax.types.blockchain_format.sized_bytes import bytes32
TupleTree = Any # Union[bytes32, Tuple["TupleTree", "TupleTree"]]
Proof_Tree_Type = Any # Union[bytes32, Tuple[bytes32, "Proof_Tree_Type"]]
HASH_TREE_PREFIX = bytes([2])
HASH_LEAF_PREFIX = bytes([1])
# paths here are not quite the same a `NodePath` paths. We don't need the high order bit
# anymore since the proof indicates how big the path is.
def compose_paths(path_1: int, path_2: int, path_2_length: int) -> int:
return (path_1 << path_2_length) | path_2
def sha256(*args: bytes) -> bytes32:
return bytes32(hashlib.sha256(b"".join(args)).digest())
def build_merkle_tree_from_binary_tree(tuples: TupleTree) -> Tuple[bytes32, Dict[bytes32, Tuple[int, List[bytes32]]]]:
if isinstance(tuples, bytes):
tuples = bytes32(tuples)
return sha256(HASH_LEAF_PREFIX, tuples), {tuples: (0, [])}
left, right = tuples
left_root, left_proofs = build_merkle_tree_from_binary_tree(left)
right_root, right_proofs = build_merkle_tree_from_binary_tree(right)
new_root = sha256(HASH_TREE_PREFIX, left_root, right_root)
new_proofs = {}
for name, (path, proof) in left_proofs.items():
proof.append(right_root)
new_proofs[name] = (path, proof)
for name, (path, proof) in right_proofs.items():
path |= 1 << len(proof)
proof.append(left_root)
new_proofs[name] = (path, proof)
return new_root, new_proofs
def list_to_binary_tree(objects: List[Any]) -> Any:
size = len(objects)
if size == 1:
return objects[0]
midpoint = (size + 1) >> 1
first_half = objects[:midpoint]
last_half = objects[midpoint:]
return (list_to_binary_tree(first_half), list_to_binary_tree(last_half))
def build_merkle_tree(objects: List[bytes32]) -> Tuple[bytes32, Dict[bytes32, Tuple[int, List[bytes32]]]]:
"""
return (merkle_root, dict_of_proofs)
"""
objects_binary_tree = list_to_binary_tree(objects)
return build_merkle_tree_from_binary_tree(objects_binary_tree)
def merkle_proof_from_path_and_tree(node_path: int, proof_tree: Proof_Tree_Type) -> Tuple[int, List[bytes32]]:
proof_path = 0
proof = []
while not isinstance(proof_tree, bytes32):
left_vs_right = node_path & 1
path_element = proof_tree[1][1 - left_vs_right]
if isinstance(path_element, bytes32):
proof.append(path_element)
else:
proof.append(path_element[0])
node_path >>= 1
proof_tree = proof_tree[1][left_vs_right]
proof_path += proof_path + left_vs_right
proof.reverse()
return proof_path, proof
def _simplify_merkle_proof(tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) -> bytes32:
# we return the expected merkle root
path, nodes = proof
for node in nodes:
if path & 1:
tree_hash = sha256(HASH_TREE_PREFIX, node, tree_hash)
else:
tree_hash = sha256(HASH_TREE_PREFIX, tree_hash, node)
path >>= 1
return tree_hash
def simplify_merkle_proof(tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) -> bytes32:
return _simplify_merkle_proof(sha256(HASH_LEAF_PREFIX, tree_hash), proof)
def check_merkle_proof(merkle_root: bytes32, tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) -> bool:
return merkle_root == simplify_merkle_proof(tree_hash, proof)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/peer_request_cache.py | flax/wallet/util/peer_request_cache.py | from __future__ import annotations
import asyncio
from typing import Any, Optional, Tuple
from flax.protocols.wallet_protocol import CoinState
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.header_block import HeaderBlock
from flax.util.hash import std_hash
from flax.util.ints import uint32, uint64
from flax.util.lru_cache import LRUCache
class PeerRequestCache:
_blocks: LRUCache[uint32, HeaderBlock] # height -> HeaderBlock
_block_requests: LRUCache[Tuple[uint32, uint32], asyncio.Task[Any]] # (start, end) -> Task
_states_validated: LRUCache[bytes32, Optional[uint32]] # coin state hash -> last change height, or None for reorg
_timestamps: LRUCache[uint32, uint64] # block height -> timestamp
_blocks_validated: LRUCache[bytes32, uint32] # header_hash -> height
_block_signatures_validated: LRUCache[bytes32, uint32] # sig_hash -> height
_additions_in_block: LRUCache[Tuple[bytes32, bytes32], uint32] # header_hash, puzzle_hash -> height
def __init__(self) -> None:
self._blocks = LRUCache(100)
self._block_requests = LRUCache(300)
self._states_validated = LRUCache(1000)
self._timestamps = LRUCache(1000)
self._blocks_validated = LRUCache(1000)
self._block_signatures_validated = LRUCache(1000)
self._additions_in_block = LRUCache(200)
def get_block(self, height: uint32) -> Optional[HeaderBlock]:
return self._blocks.get(height)
def add_to_blocks(self, header_block: HeaderBlock) -> None:
self._blocks.put(header_block.height, header_block)
if header_block.is_transaction_block:
assert header_block.foliage_transaction_block is not None
if self._timestamps.get(header_block.height) is None:
self._timestamps.put(header_block.height, header_block.foliage_transaction_block.timestamp)
def get_block_request(self, start: uint32, end: uint32) -> Optional[asyncio.Task[Any]]:
return self._block_requests.get((start, end))
def add_to_block_requests(self, start: uint32, end: uint32, request: asyncio.Task[Any]) -> None:
self._block_requests.put((start, end), request)
def in_states_validated(self, coin_state_hash: bytes32) -> bool:
return self._states_validated.get(coin_state_hash) is not None
def add_to_states_validated(self, coin_state: CoinState) -> None:
cs_height: Optional[uint32] = None
if coin_state.spent_height is not None:
cs_height = uint32(coin_state.spent_height)
elif coin_state.created_height is not None:
cs_height = uint32(coin_state.created_height)
self._states_validated.put(coin_state.get_hash(), cs_height)
def get_height_timestamp(self, height: uint32) -> Optional[uint64]:
return self._timestamps.get(height)
def add_to_blocks_validated(self, reward_chain_hash: bytes32, height: uint32) -> None:
self._blocks_validated.put(reward_chain_hash, height)
def in_blocks_validated(self, reward_chain_hash: bytes32) -> bool:
return self._blocks_validated.get(reward_chain_hash) is not None
def add_to_block_signatures_validated(self, block: HeaderBlock) -> None:
sig_hash: bytes32 = self._calculate_sig_hash_from_block(block)
self._block_signatures_validated.put(sig_hash, block.height)
@staticmethod
def _calculate_sig_hash_from_block(block: HeaderBlock) -> bytes32:
return std_hash(
bytes(block.reward_chain_block.proof_of_space.plot_public_key)
+ bytes(block.foliage.foliage_block_data)
+ bytes(block.foliage.foliage_block_data_signature)
)
def in_block_signatures_validated(self, block: HeaderBlock) -> bool:
sig_hash: bytes32 = self._calculate_sig_hash_from_block(block)
return self._block_signatures_validated.get(sig_hash) is not None
def add_to_additions_in_block(self, header_hash: bytes32, addition_ph: bytes32, height: uint32) -> None:
self._additions_in_block.put((header_hash, addition_ph), height)
def in_additions_in_block(self, header_hash: bytes32, addition_ph: bytes32) -> bool:
return self._additions_in_block.get((header_hash, addition_ph)) is not None
def clear_after_height(self, height: int) -> None:
# Remove any cached item which relates to an event that happened at a height above height.
new_blocks = LRUCache[uint32, HeaderBlock](self._blocks.capacity)
for k, v in self._blocks.cache.items():
if k <= height:
new_blocks.put(k, v)
self._blocks = new_blocks
new_block_requests: LRUCache[Tuple[uint32, uint32], asyncio.Task[Any]] = LRUCache(self._block_requests.capacity)
for (start_h, end_h), fetch_task in self._block_requests.cache.items():
if start_h <= height and end_h <= height:
new_block_requests.put((start_h, end_h), fetch_task)
self._block_requests = new_block_requests
new_states_validated: LRUCache[bytes32, Optional[uint32]] = LRUCache(self._states_validated.capacity)
for cs_hash, cs_height in self._states_validated.cache.items():
if cs_height is not None and cs_height <= height:
new_states_validated.put(cs_hash, cs_height)
self._states_validated = new_states_validated
new_timestamps: LRUCache[uint32, uint64] = LRUCache(self._timestamps.capacity)
for h, ts in self._timestamps.cache.items():
if h <= height:
new_timestamps.put(h, ts)
self._timestamps = new_timestamps
new_blocks_validated: LRUCache[bytes32, uint32] = LRUCache(self._blocks_validated.capacity)
for hh, h in self._blocks_validated.cache.items():
if h <= height:
new_blocks_validated.put(hh, h)
self._blocks_validated = new_blocks_validated
new_block_signatures_validated: LRUCache[bytes32, uint32] = LRUCache(self._block_signatures_validated.capacity)
for sig_hash, h in self._block_signatures_validated.cache.items():
if h <= height:
new_block_signatures_validated.put(sig_hash, h)
self._block_signatures_validated = new_block_signatures_validated
new_additions_in_block: LRUCache[Tuple[bytes32, bytes32], uint32] = LRUCache(self._additions_in_block.capacity)
for (hh, ph), h in self._additions_in_block.cache.items():
if h <= height:
new_additions_in_block.put((hh, ph), h)
self._additions_in_block = new_additions_in_block
async def can_use_peer_request_cache(
coin_state: CoinState, peer_request_cache: PeerRequestCache, fork_height: Optional[uint32]
) -> bool:
if not peer_request_cache.in_states_validated(coin_state.get_hash()):
return False
if fork_height is None:
return True
if coin_state.created_height is None and coin_state.spent_height is None:
# Performing a reorg
return False
if coin_state.created_height is not None and coin_state.created_height > fork_height:
return False
if coin_state.spent_height is not None and coin_state.spent_height > fork_height:
return False
return True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/wallet_sync_utils.py | flax/wallet/util/wallet_sync_utils.py | import asyncio
from flax.protocols.shared_protocol import Capability
import logging
import random
from typing import List, Optional, Tuple, Union, Dict
from chia_rs import compute_merkle_set_root
from flax.consensus.constants import ConsensusConstants
from flax.protocols import wallet_protocol
from flax.protocols.wallet_protocol import (
RequestAdditions,
RequestBlockHeaders,
RespondAdditions,
RejectAdditionsRequest,
RejectRemovalsRequest,
RespondBlockHeaders,
RespondRemovals,
RequestRemovals,
CoinState,
RespondToPhUpdates,
RespondToCoinUpdates,
RespondHeaderBlocks,
RequestHeaderBlocks,
RejectHeaderBlocks,
RejectBlockHeaders,
)
from flax.server.ws_connection import WSFlaxConnection
from flax.types.blockchain_format.coin import hash_coin_ids, Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.full_block import FullBlock
from flax.types.header_block import HeaderBlock
from flax.util.ints import uint32
from flax.util.merkle_set import confirm_not_included_already_hashed, confirm_included_already_hashed, MerkleSet
from flax.wallet.util.peer_request_cache import PeerRequestCache
log = logging.getLogger(__name__)
class PeerRequestException(Exception):
pass
async def fetch_last_tx_from_peer(height: uint32, peer: WSFlaxConnection) -> Optional[HeaderBlock]:
request_height: int = height
while True:
if request_height == -1:
return None
response: Optional[List[HeaderBlock]] = await request_header_blocks(
peer, uint32(request_height), uint32(request_height)
)
if response is not None and len(response) > 0:
if response[0].is_transaction_block:
return response[0]
elif request_height < height:
# The peer might be slightly behind others but still synced, so we should allow fetching one more TX block
break
request_height = request_height - 1
return None
async def subscribe_to_phs(
puzzle_hashes: List[bytes32],
peer: WSFlaxConnection,
min_height: int,
) -> List[CoinState]:
"""
Tells full nodes that we are interested in puzzle hashes, and returns the response.
"""
msg = wallet_protocol.RegisterForPhUpdates(puzzle_hashes, uint32(max(min_height, uint32(0))))
all_coins_state: Optional[RespondToPhUpdates] = await peer.register_interest_in_puzzle_hash(msg, timeout=300)
if all_coins_state is None:
raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_puzzle_hash")
return all_coins_state.coin_states
async def subscribe_to_coin_updates(
coin_names: List[bytes32],
peer: WSFlaxConnection,
min_height: int,
) -> List[CoinState]:
"""
Tells full nodes that we are interested in coin ids, and returns the response.
"""
msg = wallet_protocol.RegisterForCoinUpdates(coin_names, uint32(max(0, min_height)))
all_coins_state: Optional[RespondToCoinUpdates] = await peer.register_interest_in_coin(msg, timeout=300)
if all_coins_state is None:
raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_coin")
return all_coins_state.coin_states
def validate_additions(
coins: List[Tuple[bytes32, List[Coin]]],
proofs: Optional[List[Tuple[bytes32, bytes, Optional[bytes]]]],
root: bytes32,
):
if proofs is None:
# Verify root
additions_merkle_items: List[bytes32] = []
# Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
for puzzle_hash, coins_l in coins:
additions_merkle_items.append(puzzle_hash)
additions_merkle_items.append(hash_coin_ids([c.name() for c in coins_l]))
additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
if root != additions_root:
return False
else:
for i in range(len(coins)):
assert coins[i][0] == proofs[i][0]
coin_list_1: List[Coin] = coins[i][1]
puzzle_hash_proof: Optional[bytes] = proofs[i][1]
coin_list_proof: Optional[bytes] = proofs[i][2]
if len(coin_list_1) == 0:
# Verify exclusion proof for puzzle hash
assert puzzle_hash_proof is not None
not_included = confirm_not_included_already_hashed(
root,
coins[i][0],
puzzle_hash_proof,
)
if not_included is False:
return False
else:
try:
# Verify inclusion proof for coin list
assert coin_list_proof is not None
included = confirm_included_already_hashed(
root,
hash_coin_ids([c.name() for c in coin_list_1]),
coin_list_proof,
)
if included is False:
return False
except AssertionError:
return False
try:
# Verify inclusion proof for puzzle hash
assert puzzle_hash_proof is not None
included = confirm_included_already_hashed(
root,
coins[i][0],
puzzle_hash_proof,
)
if included is False:
return False
except AssertionError:
return False
return True
def validate_removals(
coins: List[Tuple[bytes32, Optional[Coin]]], proofs: Optional[List[Tuple[bytes32, bytes]]], root: bytes32
):
if proofs is None:
# If there are no proofs, it means all removals were returned in the response.
# we must find the ones relevant to our wallets.
# Verify removals root
removals_merkle_set = MerkleSet()
for name_coin in coins:
_, coin = name_coin
if coin is not None:
removals_merkle_set.add_already_hashed(coin.name())
removals_root = removals_merkle_set.get_root()
if root != removals_root:
return False
else:
# This means the full node has responded only with the relevant removals
# for our wallet. Each merkle proof must be verified.
if len(coins) != len(proofs):
return False
for i in range(len(coins)):
# Coins are in the same order as proofs
if coins[i][0] != proofs[i][0]:
return False
coin = coins[i][1]
if coin is None:
# Verifies merkle proof of exclusion
not_included = confirm_not_included_already_hashed(
root,
coins[i][0],
proofs[i][1],
)
if not_included is False:
return False
else:
# Verifies merkle proof of inclusion of coin name
if coins[i][0] != coin.name():
return False
included = confirm_included_already_hashed(
root,
coin.name(),
proofs[i][1],
)
if included is False:
return False
return True
async def request_and_validate_removals(
peer: WSFlaxConnection, height: uint32, header_hash: bytes32, coin_name: bytes32, removals_root: bytes32
) -> bool:
removals_request = RequestRemovals(height, header_hash, [coin_name])
removals_res: Optional[Union[RespondRemovals, RejectRemovalsRequest]] = await peer.request_removals(
removals_request
)
if removals_res is None or isinstance(removals_res, RejectRemovalsRequest):
return False
assert removals_res.proofs is not None
return validate_removals(removals_res.coins, removals_res.proofs, removals_root)
async def request_and_validate_additions(
peer: WSFlaxConnection,
peer_request_cache: PeerRequestCache,
height: uint32,
header_hash: bytes32,
puzzle_hash: bytes32,
additions_root: bytes32,
) -> bool:
if peer_request_cache.in_additions_in_block(header_hash, puzzle_hash):
return True
additions_request = RequestAdditions(height, header_hash, [puzzle_hash])
additions_res: Optional[Union[RespondAdditions, RejectAdditionsRequest]] = await peer.request_additions(
additions_request
)
if additions_res is None or isinstance(additions_res, RejectAdditionsRequest):
return False
result: bool = validate_additions(
additions_res.coins,
additions_res.proofs,
additions_root,
)
peer_request_cache.add_to_additions_in_block(header_hash, puzzle_hash, height)
return result
def get_block_challenge(
constants: ConsensusConstants,
header_block: FullBlock,
all_blocks: Dict[bytes32, FullBlock],
genesis_block: bool,
overflow: bool,
skip_overflow_last_ss_validation: bool,
) -> Optional[bytes32]:
if len(header_block.finished_sub_slots) > 0:
if overflow:
# New sub-slot with overflow block
if skip_overflow_last_ss_validation:
# In this case, we are missing the final sub-slot bundle (it's not finished yet), however
# There is a whole empty slot before this block is infused
challenge: bytes32 = header_block.finished_sub_slots[-1].challenge_chain.get_hash()
else:
challenge = header_block.finished_sub_slots[
-1
].challenge_chain.challenge_chain_end_of_slot_vdf.challenge
else:
# No overflow, new slot with a new challenge
challenge = header_block.finished_sub_slots[-1].challenge_chain.get_hash()
else:
if genesis_block:
challenge = constants.GENESIS_CHALLENGE
else:
if overflow:
if skip_overflow_last_ss_validation:
# Overflow infusion without the new slot, so get the last challenge
challenges_to_look_for = 1
else:
# Overflow infusion, so get the second to last challenge. skip_overflow_last_ss_validation is False,
# Which means no sub slots are omitted
challenges_to_look_for = 2
else:
challenges_to_look_for = 1
reversed_challenge_hashes: List[bytes32] = []
if header_block.height == 0:
return constants.GENESIS_CHALLENGE
if header_block.prev_header_hash not in all_blocks:
return None
curr: Optional[FullBlock] = all_blocks[header_block.prev_header_hash]
while len(reversed_challenge_hashes) < challenges_to_look_for:
if curr is None:
return None
if len(curr.finished_sub_slots) > 0:
reversed_challenge_hashes += reversed(
[slot.challenge_chain.get_hash() for slot in curr.finished_sub_slots]
)
if curr.height == 0:
return constants.GENESIS_CHALLENGE
curr = all_blocks.get(curr.prev_header_hash, None)
challenge = reversed_challenge_hashes[challenges_to_look_for - 1]
return challenge
def last_change_height_cs(cs: CoinState) -> uint32:
if cs.spent_height is not None:
return uint32(cs.spent_height)
if cs.created_height is not None:
return uint32(cs.created_height)
# Reorgs should be processed at the beginning
return uint32(0)
def get_block_header(block):
return HeaderBlock(
block.finished_sub_slots,
block.reward_chain_block,
block.challenge_chain_sp_proof,
block.challenge_chain_ip_proof,
block.reward_chain_sp_proof,
block.reward_chain_ip_proof,
block.infused_challenge_chain_ip_proof,
block.foliage,
block.foliage_transaction_block,
b"", # we don't need the filter
block.transactions_info,
)
async def request_header_blocks(
    peer: WSFlaxConnection, start_height: uint32, end_height: uint32
) -> Optional[List[HeaderBlock]]:
    """Ask a single peer for header blocks in [start_height, end_height].

    Prefers the newer BLOCK_HEADERS wire request when the peer advertises
    that capability, otherwise falls back to the legacy request.  Returns
    None when the peer does not answer or explicitly rejects the request.
    """
    if Capability.BLOCK_HEADERS in peer.peer_capabilities:
        response = await peer.request_block_headers(RequestBlockHeaders(start_height, end_height, False))
    else:
        response = await peer.request_header_blocks(RequestHeaderBlocks(start_height, end_height))
    if response is None:
        return None
    if isinstance(response, (RejectBlockHeaders, RejectHeaderBlocks)):
        return None
    return response.header_blocks
async def _fetch_header_blocks_inner(
    all_peers: List[Tuple[WSFlaxConnection, bool]],
    request_start: uint32,
    request_end: uint32,
) -> Optional[Union[RespondHeaderBlocks, RespondBlockHeaders]]:
    """Try peers one at a time until one serves the requested header range.

    Peers supporting the BLOCK_HEADERS capability are tried first; each
    group is shuffled so load spreads across peers.  An untrusted peer that
    fails to answer is disconnected.  Returns None when every peer failed.
    """
    # We will modify this list, don't modify passed parameters.
    capable_peers = [entry for entry in all_peers if Capability.BLOCK_HEADERS in entry[0].peer_capabilities]
    legacy_peers = [entry for entry in all_peers if Capability.BLOCK_HEADERS not in entry[0].peer_capabilities]
    random.shuffle(capable_peers)
    random.shuffle(legacy_peers)
    for peer, is_trusted in capable_peers + legacy_peers:
        if Capability.BLOCK_HEADERS in peer.peer_capabilities:
            response = await peer.request_block_headers(RequestBlockHeaders(request_start, request_end, False))
        else:
            response = await peer.request_header_blocks(RequestHeaderBlocks(request_start, request_end))
        if isinstance(response, (RespondHeaderBlocks, RespondBlockHeaders)):
            return response
        # Request to peer failed in some way; drop untrusted peers that could
        # not serve the blocks we asked for.
        if not is_trusted:
            log.info(f"Closing peer {peer} since it does not have the blocks we asked for")
            await peer.close()
    return None
async def fetch_header_blocks_in_range(
    start: uint32,
    end: uint32,
    peer_request_cache: PeerRequestCache,
    all_peers: List[Tuple[WSFlaxConnection, bool]],
) -> Optional[List[HeaderBlock]]:
    """Fetch header blocks for heights [start, end] in 32-block batches.

    Batches are aligned to multiples of 32 so identical ranges can be
    deduplicated through ``peer_request_cache``; the cache stores the fetch
    *task*, letting concurrent callers share one in-flight request.
    Returns None as soon as any batch cannot be fetched.
    """
    blocks: List[HeaderBlock] = []
    # Walk 32-aligned batch starts that cover [start, end].
    for i in range(start - (start % 32), end + 1, 32):
        request_start = min(uint32(i), end)
        request_end = min(uint32(i + 31), end)
        # Cached value is the asyncio task itself (possibly still running).
        res_h_blocks_task: Optional[asyncio.Task] = peer_request_cache.get_block_request(request_start, request_end)
        if res_h_blocks_task is not None:
            log.debug(f"Using cache for: {start}-{end}")
            if res_h_blocks_task.done():
                res_h_blocks: Optional[Union[RespondBlockHeaders, RespondHeaderBlocks]] = res_h_blocks_task.result()
            else:
                res_h_blocks = await res_h_blocks_task
        else:
            log.debug(f"Fetching: {start}-{end}")
            res_h_blocks_task = asyncio.create_task(_fetch_header_blocks_inner(all_peers, request_start, request_end))
            # Cache before awaiting so other callers can join this request.
            peer_request_cache.add_to_block_requests(request_start, request_end, res_h_blocks_task)
            res_h_blocks = await res_h_blocks_task
        if res_h_blocks is None:
            return None
        assert res_h_blocks is not None
        # The first (aligned) batch may reach below `start`; drop those heights.
        blocks.extend([bl for bl in res_h_blocks.header_blocks if bl.height >= start])
    return blocks
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/__init__.py | flax/wallet/util/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/new_peak_queue.py | flax/wallet/util/new_peak_queue.py | from __future__ import annotations
import asyncio
import dataclasses
from enum import IntEnum
from typing import Any, List
from flax.protocols.wallet_protocol import CoinStateUpdate, NewPeakWallet
from flax.server.ws_connection import WSFlaxConnection
from flax.types.blockchain_format.sized_bytes import bytes32
class NewPeakQueueTypes(IntEnum):
    """Priority classes for items placed on the wallet's NewPeakQueue."""

    # Lower number means higher priority in the queue
    COIN_ID_SUBSCRIPTION = 1
    PUZZLE_HASH_SUBSCRIPTION = 2
    FULL_NODE_STATE_UPDATED = 3
    NEW_PEAK_WALLET = 4
@dataclasses.dataclass
class NewPeakItem:
    """Entry in the wallet's priority queue.

    Ordering is by ``item_type`` first (lower value = higher priority).
    Within the same type, subscription items compare equal to each other,
    while the remaining types order by the height of the message stored in
    ``data[0]``.
    """

    item_type: NewPeakQueueTypes
    data: Any

    def _is_subscription(self) -> bool:
        # Subscription payloads are plain lists and carry no height.
        return self.item_type in {
            NewPeakQueueTypes.COIN_ID_SUBSCRIPTION,
            NewPeakQueueTypes.PUZZLE_HASH_SUBSCRIPTION,
        }

    def _height(self):
        # For FULL_NODE_STATE_UPDATED / NEW_PEAK_WALLET, data is (message, peer).
        return self.data[0].height

    def __lt__(self, other):
        if self.item_type != other.item_type:
            return self.item_type < other.item_type
        if self._is_subscription():
            return False  # All subscriptions are equal
        return self._height() < other._height()

    def __le__(self, other):
        if self.item_type != other.item_type:
            return self.item_type < other.item_type
        if self._is_subscription():
            return True  # All subscriptions are equal
        return self._height() <= other._height()

    def __gt__(self, other):
        if self.item_type != other.item_type:
            return self.item_type > other.item_type
        if self._is_subscription():
            return False  # All subscriptions are equal
        return self._height() > other._height()

    def __ge__(self, other):
        if self.item_type != other.item_type:
            return self.item_type > other.item_type
        if self._is_subscription():
            return True  # All subscriptions are equal
        return self._height() >= other._height()
class NewPeakQueue:
    """Priority queue of wallet work items (highest priority first).

    Keeps a running count of queued/outstanding non-NEW_PEAK items so
    callers can tell whether data-processing work is still pending.
    """

    def __init__(self, inner_queue: asyncio.PriorityQueue):
        self._inner_queue: asyncio.PriorityQueue = inner_queue
        self._pending_data_process_items: int = 0

    async def subscribe_to_coin_ids(self, coin_ids: List[bytes32]):
        """Enqueue a coin-id subscription request."""
        self._pending_data_process_items += 1
        item = NewPeakItem(NewPeakQueueTypes.COIN_ID_SUBSCRIPTION, coin_ids)
        await self._inner_queue.put(item)

    async def subscribe_to_puzzle_hashes(self, puzzle_hashes: List[bytes32]):
        """Enqueue a puzzle-hash subscription request."""
        self._pending_data_process_items += 1
        item = NewPeakItem(NewPeakQueueTypes.PUZZLE_HASH_SUBSCRIPTION, puzzle_hashes)
        await self._inner_queue.put(item)

    async def full_node_state_updated(self, coin_state_update: CoinStateUpdate, peer: WSFlaxConnection):
        """Enqueue a coin-state update received from a full node."""
        self._pending_data_process_items += 1
        item = NewPeakItem(NewPeakQueueTypes.FULL_NODE_STATE_UPDATED, (coin_state_update, peer))
        await self._inner_queue.put(item)

    async def new_peak_wallet(self, new_peak: NewPeakWallet, peer: WSFlaxConnection):
        """Enqueue a new-peak notification (does not count as pending data work)."""
        await self._inner_queue.put(NewPeakItem(NewPeakQueueTypes.NEW_PEAK_WALLET, (new_peak, peer)))

    async def get(self) -> NewPeakItem:
        """Pop the highest-priority item, decrementing the pending counter for data items."""
        item: NewPeakItem = await self._inner_queue.get()
        if item.item_type != NewPeakQueueTypes.NEW_PEAK_WALLET:
            self._pending_data_process_items -= 1
        return item

    def has_pending_data_process_items(self) -> bool:
        """True while any subscription/state-update item is queued or being handled."""
        return self._pending_data_process_items > 0
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/notifications.py | flax/wallet/util/notifications.py | from __future__ import annotations
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint64
from flax.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
NOTIFICATION_MOD = load_clvm_maybe_recompile("notification.clvm")
def construct_notification(target: bytes32, amount: uint64) -> Program:
    """Curry ``target`` and ``amount`` into the notification puzzle."""
    curried = NOTIFICATION_MOD.curry(target, amount)
    return curried
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/debug_spend_bundle.py | flax/wallet/util/debug_spend_bundle.py | from typing import List
from blspy import AugSchemeMPL, G1Element
from clvm import KEYWORD_FROM_ATOM
from clvm_tools.binutils import disassemble as bu_disassemble
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program, INFINITE_COST
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.types.condition_opcodes import ConditionOpcode
from flax.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from flax.util.hash import std_hash
CONDITIONS = dict((k, bytes(v)[0]) for k, v in ConditionOpcode.__members__.items()) # pylint: disable=E1101
KFA = {v: k for k, v in CONDITIONS.items()}
# information needed to spend a cc
# if we ever support more genesis conditions, like a re-issuable coin,
# we may need also to save the `genesis_coin_mod` or its hash
def disassemble(sexp: Program):
    """
    Like clvm_tools' `disassemble`, but additionally renders condition
    opcodes (e.g. `ASSERT_ANNOUNCEMENT_CONSUMED`) by name.
    """
    keyword_table = dict(KEYWORD_FROM_ATOM)
    for opcode, name in KFA.items():
        keyword_table[Program.to(opcode).as_atom()] = name
    return bu_disassemble(sexp, keyword_table)
def coin_as_program(coin: Coin) -> Program:
    """
    Render a coin as the `(parent_coin_info puzzle_hash amount)` triple used
    when putting `coin_info` into a solution.
    """
    triple = [coin.parent_coin_info, coin.puzzle_hash, coin.amount]
    return Program.to(triple)
def dump_coin(coin: Coin) -> str:
    """Human-readable disassembly of a coin's `(parent puzzle_hash amount)` triple."""
    as_program = coin_as_program(coin)
    return disassemble(as_program)
def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) -> None:
    """
    Print a lot of useful information about a `SpendBundle` that might help with debugging
    its clvm.

    For each spend: verifies the puzzle reveal hash, runs the puzzle against
    its solution, and prints the resulting conditions.  Afterwards prints
    spent/created/ephemeral coin summaries, coin/puzzle announcement
    created-vs-asserted symmetric differences, and finally checks the
    aggregated BLS signature against the collected (pk, msg) pairs.
    """
    pks = []
    msgs = []
    created_coin_announcements: List[List[bytes]] = []
    asserted_coin_announcements = []
    created_puzzle_announcements: List[List[bytes]] = []
    asserted_puzzle_announcements = []
    print("=" * 80)
    for coin_spend in spend_bundle.coin_spends:
        coin = coin_spend.coin
        puzzle_reveal = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
        solution = Program.from_bytes(bytes(coin_spend.solution))
        coin_name = coin.name()
        # Sanity check: the revealed puzzle must hash to the coin's puzzle hash.
        if puzzle_reveal.get_tree_hash() != coin_spend.coin.puzzle_hash:
            print("*** BAD PUZZLE REVEAL")
            print(f"{puzzle_reveal.get_tree_hash().hex()} vs {coin_spend.coin.puzzle_hash.hex()}")
            print("*" * 80)
            continue
        print(f"consuming coin {dump_coin(coin)}")
        print(f" with id {coin_name}")
        print()
        # Copy/paste-able repro command for clvm_tools' brun.
        print(f"\nbrun -y main.sym '{bu_disassemble(puzzle_reveal)}' '{bu_disassemble(solution)}'")
        error, conditions, cost = conditions_dict_for_solution(puzzle_reveal, solution, INFINITE_COST)
        if error:
            print(f"*** error {error}")
        elif conditions is not None:
            # Collect the (pubkey, message) pairs needed for signature verification.
            for pk_bytes, m in pkm_pairs_for_conditions_dict(conditions, coin_name, agg_sig_additional_data):
                pks.append(G1Element.from_bytes(pk_bytes))
                msgs.append(m)
            print()
            cost, r = puzzle_reveal.run_with_cost(INFINITE_COST, solution)  # type: ignore
            print(disassemble(r))
            print()
            if conditions and len(conditions) > 0:
                print("grouped conditions:")
                for condition_programs in conditions.values():
                    print()
                    for c in condition_programs:
                        # NOTE(review): only 1- and 2-var conditions are handled here;
                        # a condition with 0 or >2 vars reuses the previous `as_prog`
                        # (or raises NameError if it is the first) — appears to assume
                        # all conditions carry 1-2 vars.  Confirm before relying on it.
                        if len(c.vars) == 1:
                            as_prog = Program.to([c.opcode, c.vars[0]])
                        if len(c.vars) == 2:
                            as_prog = Program.to([c.opcode, c.vars[0], c.vars[1]])
                        print(f" {disassemble(as_prog)}")
                # Coin announcements are namespaced by the creating coin's id,
                # puzzle announcements by the creating puzzle's hash.
                created_coin_announcements.extend(
                    [coin_name] + _.vars for _ in conditions.get(ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [])
                )
                asserted_coin_announcements.extend(
                    [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [])]
                )
                created_puzzle_announcements.extend(
                    [puzzle_reveal.get_tree_hash()] + _.vars
                    for _ in conditions.get(ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [])
                )
                asserted_puzzle_announcements.extend(
                    [_.vars[0].hex() for _ in conditions.get(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [])]
                )
                print()
            else:
                print("(no output conditions generated)")
        print()
        print("-------")
    # Partition coins into spent / created / ephemeral (created and spent
    # within this same bundle).
    created = set(spend_bundle.additions())
    spent = set(spend_bundle.removals())
    # zero_coin_set is computed before ephemeral coins are removed, so it
    # includes zero-amount ephemeral coins too.
    zero_coin_set = set(coin.name() for coin in created if coin.amount == 0)
    ephemeral = created.intersection(spent)
    created.difference_update(ephemeral)
    spent.difference_update(ephemeral)
    print()
    print("spent coins")
    for coin in sorted(spent, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => spent coin id {coin.name()}")
    print()
    print("created coins")
    for coin in sorted(created, key=lambda _: _.name()):
        print(f" {dump_coin(coin)}")
        print(f" => created coin id {coin.name()}")
    if ephemeral:
        print()
        print("ephemeral coins")
        for coin in sorted(ephemeral, key=lambda _: _.name()):
            print(f" {dump_coin(coin)}")
            print(f" => created coin id {coin.name()}")
    # An announcement's id is the hash of its concatenated parts
    # (namespace + message).
    created_coin_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_coin_announcements]
    if created_coin_announcement_pairs:
        print("created coin announcements")
        for announcement, hashed in sorted(created_coin_announcement_pairs, key=lambda _: _[-1]):
            as_hex = [f"0x{_.hex()}" for _ in announcement]
            print(f" {as_hex} =>\n {hashed}")
    # Symmetric difference: announcements created but never asserted, or
    # asserted but never created — both usually indicate a bug.
    eor_coin_announcements = sorted(
        set(_[-1] for _ in created_coin_announcement_pairs) ^ set(asserted_coin_announcements)
    )
    created_puzzle_announcement_pairs = [(_, std_hash(b"".join(_)).hex()) for _ in created_puzzle_announcements]
    if created_puzzle_announcements:
        print("created puzzle announcements")
        for announcement, hashed in sorted(created_puzzle_announcement_pairs, key=lambda _: _[-1]):
            as_hex = [f"0x{_.hex()}" for _ in announcement]
            print(f" {as_hex} =>\n {hashed}")
    eor_puzzle_announcements = sorted(
        set(_[-1] for _ in created_puzzle_announcement_pairs) ^ set(asserted_puzzle_announcements)
    )
    print()
    print()
    print(f"zero_coin_set = {sorted(zero_coin_set)}")
    print()
    if created_coin_announcement_pairs or asserted_coin_announcements:
        print(f"created coin announcements = {sorted([_[-1] for _ in created_coin_announcement_pairs])}")
        print()
        print(f"asserted coin announcements = {sorted(asserted_coin_announcements)}")
        print()
        print(f"symdiff of coin announcements = {sorted(eor_coin_announcements)}")
        print()
    if created_puzzle_announcement_pairs or asserted_puzzle_announcements:
        print(f"created puzzle announcements = {sorted([_[-1] for _ in created_puzzle_announcement_pairs])}")
        print()
        print(f"asserted puzzle announcements = {sorted(asserted_puzzle_announcements)}")
        print()
        print(f"symdiff of puzzle announcements = {sorted(eor_puzzle_announcements)}")
        print()
    print()
    print("=" * 80)
    print()
    # Verify the aggregated BLS signature over all collected (pk, msg) pairs.
    validates = AugSchemeMPL.aggregate_verify(pks, msgs, spend_bundle.aggregated_signature)
    print(f"aggregated signature check pass: {validates}")
    print(f"pks: {pks}")
    print(f"msgs: {[msg.hex() for msg in msgs]}")
    # Message layout assumed below: data || 32-byte coin id || 32-byte
    # additional data (hence the -128/-64 hex offsets).
    print(f" msg_data: {[msg.hex()[:-128] for msg in msgs]}")
    print(f" coin_ids: {[msg.hex()[-128:-64] for msg in msgs]}")
    print(f" add_data: {[msg.hex()[-64:] for msg in msgs]}")
    print(f"signature: {spend_bundle.aggregated_signature}")
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/merkle_tree.py | flax/wallet/util/merkle_tree.py | from __future__ import annotations
import math
from enum import Enum
from typing import List, Optional, Tuple
from clvm.casts import int_to_bytes
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.hash import std_hash
ONE = int_to_bytes(1)
TWO = int_to_bytes(2)
def hash_a_pair(left: bytes32, right: bytes32) -> bytes32:
    """Hash an internal merkle node: std_hash of 0x02-prefixed left||right."""
    return hash_a_pair_impl(left, right) if False else std_hash(TWO + left + right)
def hash_an_atom(atom: bytes32) -> bytes32:
    """Hash a merkle leaf: std_hash of the 0x01-prefixed atom."""
    return std_hash(ONE + atom)
class TreeType(Enum):
    """Shape of a MerkleTree: balanced (TREE) or maximally skewed (WATERFALL)."""

    TREE = 1
    WATERFALL = 2
class MerkleTree:
    """Binary merkle tree over a list of leaf hashes.

    TREE shape splits the leaves recursively in half (the left half gets
    the extra element when odd); WATERFALL pairs everything-but-the-last
    against the last element at every level, yielding a maximally
    unbalanced tree.
    """

    # Shape of the tree (TREE or WATERFALL).
    type: TreeType
    # Leaf hashes, in order.
    nodes: List[bytes32]

    def __init__(self, nodes: List[bytes32], waterfall: bool = False) -> None:
        self.type = TreeType.WATERFALL if waterfall else TreeType.TREE
        self.nodes = nodes

    def split_list(self, puzzle_hashes: List[bytes32]) -> Tuple[List[bytes32], List[bytes32]]:
        """Split leaves into the (left, right) sublists per the tree shape."""
        if self.type == TreeType.TREE:
            # Balanced: left half gets the ceil when the count is odd.
            mid_index = math.ceil(len(puzzle_hashes) / 2)
            first = puzzle_hashes[0:mid_index]
            rest = puzzle_hashes[mid_index : len(puzzle_hashes)]
        else:
            # Waterfall: all but the last vs. the single last element.
            first = puzzle_hashes[0:-1]
            rest = puzzle_hashes[-1 : len(puzzle_hashes)]
        return first, rest

    def _root(self, puzzle_hashes: List[bytes32]) -> bytes32:
        # A single leaf hashes as an atom; otherwise hash the pair of the
        # two recursive sub-roots.
        if len(puzzle_hashes) == 1:
            return hash_an_atom(puzzle_hashes[0])
        else:
            first, rest = self.split_list(puzzle_hashes)
            return hash_a_pair(self._root(first), self._root(rest))

    def calculate_root(self) -> bytes32:
        """Return the merkle root over all leaves."""
        return self._root(self.nodes)

    def _proof(
        self, puzzle_hashes: List[bytes32], searching_for: bytes32
    ) -> Tuple[Optional[int], Optional[List[bytes32]], bytes32, Optional[int]]:
        """Recursive proof builder.

        Returns (path_bits, sibling_hashes, subtree_hash, num_bits):
        path_bits has bit i set when the target takes the RIGHT branch at
        depth i counted from the leaf (None when the target is absent);
        sibling_hashes lists sibling subtree hashes leaf-first;
        subtree_hash is this whole subtree's hash; num_bits is how many
        path bits have been accumulated so far.
        """
        if len(puzzle_hashes) == 1:
            atom_hash = hash_an_atom(puzzle_hashes[0])
            if puzzle_hashes[0] == searching_for:
                return (0, [], atom_hash, 0)
            else:
                return (None, [], atom_hash, None)
        else:
            first, rest = self.split_list(puzzle_hashes)
            first_hash = self._proof(first, searching_for)
            rest_hash = self._proof(rest, searching_for)
            final_path = None
            final_list = None
            bit_num = None
            if first_hash[0] is not None:
                # Target is in the left subtree: sibling is the right
                # subtree's hash and the new path bit stays 0 (left).
                final_list = first_hash[1]
                # TODO: handle hints
                # error: Item "None" of "Optional[List[bytes32]]" has no attribute "append" [union-attr]
                final_list.append(rest_hash[2])  # type: ignore[union-attr]
                bit_num = first_hash[3]
                final_path = first_hash[0]
            elif rest_hash[0] is not None:
                # Target is in the right subtree: sibling is the left
                # subtree's hash and bit `bit_num` of the path is set to 1.
                final_list = rest_hash[1]
                # TODO: handle hints
                # error: Item "None" of "Optional[List[bytes32]]" has no attribute "append" [union-attr]
                final_list.append(first_hash[2])  # type: ignore[union-attr]
                bit_num = rest_hash[3]
                # TODO: handle hints
                # error: Unsupported operand types for << ("int" and "None") [operator]
                # note: Right operand is of type "Optional[int]"
                final_path = rest_hash[0] | (1 << bit_num)  # type: ignore[operator]
            pair_hash = hash_a_pair(first_hash[2], rest_hash[2])
            return (final_path, final_list, pair_hash, bit_num + 1 if bit_num is not None else None)

    def generate_proof(self, leaf_reveal: bytes32) -> Tuple[Optional[int], List[Optional[List[bytes32]]]]:
        """Return (path_bits, [sibling_hashes]) proving membership of leaf_reveal."""
        proof = self._proof(self.nodes, leaf_reveal)
        return (proof[0], [proof[1]])
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/puzzle_compression.py | flax/wallet/util/puzzle_compression.py | import zlib
from typing import List
from flax.types.blockchain_format.program import Program
from flax.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from flax.wallet.puzzles import p2_delegated_puzzle_or_hidden_puzzle as standard_puzzle
from flax.wallet.puzzles.cat_loader import CAT_MOD
from flax.wallet.nft_wallet.nft_puzzles import (
SINGLETON_TOP_LAYER_MOD,
NFT_STATE_LAYER_MOD,
NFT_OWNERSHIP_LAYER,
NFT_METADATA_UPDATER,
NFT_TRANSFER_PROGRAM_DEFAULT,
)
# Need the legacy CAT mod for zlib backwards compatibility
LEGACY_CAT_MOD = Program.fromhex(
"ff02ffff01ff02ff5effff04ff02ffff04ffff04ff05ffff04ffff0bff2cff0580ffff04ff0bff80808080ffff04ffff02ff17ff2f80ffff04ff5fffff04ffff02ff2effff04ff02ffff04ff17ff80808080ffff04ffff0bff82027fff82057fff820b7f80ffff04ff81bfffff04ff82017fffff04ff8202ffffff04ff8205ffffff04ff820bffff80808080808080808080808080ffff04ffff01ffffffff81ca3dff46ff0233ffff3c04ff01ff0181cbffffff02ff02ffff03ff05ffff01ff02ff32ffff04ff02ffff04ff0dffff04ffff0bff22ffff0bff2cff3480ffff0bff22ffff0bff22ffff0bff2cff5c80ff0980ffff0bff22ff0bffff0bff2cff8080808080ff8080808080ffff010b80ff0180ffff02ffff03ff0bffff01ff02ffff03ffff09ffff02ff2effff04ff02ffff04ff13ff80808080ff820b9f80ffff01ff02ff26ffff04ff02ffff04ffff02ff13ffff04ff5fffff04ff17ffff04ff2fffff04ff81bfffff04ff82017fffff04ff1bff8080808080808080ffff04ff82017fff8080808080ffff01ff088080ff0180ffff01ff02ffff03ff17ffff01ff02ffff03ffff20ff81bf80ffff0182017fffff01ff088080ff0180ffff01ff088080ff018080ff0180ffff04ffff04ff05ff2780ffff04ffff10ff0bff5780ff778080ff02ffff03ff05ffff01ff02ffff03ffff09ffff02ffff03ffff09ff11ff7880ffff0159ff8080ff0180ffff01818f80ffff01ff02ff7affff04ff02ffff04ff0dffff04ff0bffff04ffff04ff81b9ff82017980ff808080808080ffff01ff02ff5affff04ff02ffff04ffff02ffff03ffff09ff11ff7880ffff01ff04ff78ffff04ffff02ff36ffff04ff02ffff04ff13ffff04ff29ffff04ffff0bff2cff5b80ffff04ff2bff80808080808080ff398080ffff01ff02ffff03ffff09ff11ff2480ffff01ff04ff24ffff04ffff0bff20ff2980ff398080ffff010980ff018080ff0180ffff04ffff02ffff03ffff09ff11ff7880ffff0159ff8080ff0180ffff04ffff02ff7affff04ff02ffff04ff0dffff04ff0bffff04ff17ff808080808080ff80808080808080ff0180ffff01ff04ff80ffff04ff80ff17808080ff0180ffffff02ffff03ff05ffff01ff04ff09ffff02ff26ffff04ff02ffff04ff0dffff04ff0bff808080808080ffff010b80ff0180ff0bff22ffff0bff2cff5880ffff0bff22ffff0bff22ffff0bff2cff5c80ff0580ffff0bff22ffff02ff32ffff04ff02ffff04ff07ffff04ffff0bff2cff2c80ff8080808080ffff0bff2cff8080808080ffff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff2effff04ff02ffff04ff09ff80808080ffff02ff2effff04ff02ffff04ff0dff8080808080f
fff01ff0bff2cff058080ff0180ffff04ffff04ff28ffff04ff5fff808080ffff02ff7effff04ff02ffff04ffff04ffff04ff2fff0580ffff04ff5fff82017f8080ffff04ffff02ff7affff04ff02ffff04ff0bffff04ff05ffff01ff808080808080ffff04ff17ffff04ff81bfffff04ff82017fffff04ffff0bff8204ffffff02ff36ffff04ff02ffff04ff09ffff04ff820affffff04ffff0bff2cff2d80ffff04ff15ff80808080808080ff8216ff80ffff04ff8205ffffff04ff820bffff808080808080808080808080ff02ff2affff04ff02ffff04ff5fffff04ff3bffff04ffff02ffff03ff17ffff01ff09ff2dffff0bff27ffff02ff36ffff04ff02ffff04ff29ffff04ff57ffff04ffff0bff2cff81b980ffff04ff59ff80808080808080ff81b78080ff8080ff0180ffff04ff17ffff04ff05ffff04ff8202ffffff04ffff04ffff04ff24ffff04ffff0bff7cff2fff82017f80ff808080ffff04ffff04ff30ffff04ffff0bff81bfffff0bff7cff15ffff10ff82017fffff11ff8202dfff2b80ff8202ff808080ff808080ff138080ff80808080808080808080ff018080" # noqa
)
OFFER_MOD_OLD = load_clvm_maybe_recompile("settlement_payments_old.clvm")
OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clvm")
# For backwards compatibility to work, we must assume that these mods (already deployed) will not change
# In the case that they do change and we don't support the old asset then we need to keep around the legacy module
ZDICT = [
bytes(standard_puzzle.MOD) + bytes(LEGACY_CAT_MOD),
bytes(OFFER_MOD_OLD),
bytes(SINGLETON_TOP_LAYER_MOD)
+ bytes(NFT_STATE_LAYER_MOD)
+ bytes(NFT_OWNERSHIP_LAYER)
+ bytes(NFT_METADATA_UPDATER)
+ bytes(NFT_TRANSFER_PROGRAM_DEFAULT),
bytes(CAT_MOD),
bytes(OFFER_MOD),
# more dictionaries go here
]
LATEST_VERSION = len(ZDICT)
class CompressionVersionError(Exception):
    """Raised when a blob is compressed with a version newer than we support."""

    def __init__(self, version_number: int):
        self.message = f"The data is compressed with version {version_number} and cannot be parsed. "
        self.message += "Update software and try again."
        # Bug fix: pass the message to Exception.__init__ so str(err), repr(err)
        # and tracebacks actually show it (previously they rendered empty).
        super().__init__(self.message)
def zdict_for_version(version: int) -> bytes:
    """Concatenate the first `version` compression dictionaries into one zdict."""
    return b"".join(ZDICT[0:version])
def compress_with_zdict(blob: bytes, zdict: bytes) -> bytes:
    """Deflate `blob` with zlib using `zdict` as the preset dictionary."""
    compressor = zlib.compressobj(zdict=zdict)
    return compressor.compress(blob) + compressor.flush()
def decompress_with_zdict(blob: bytes, zdict: bytes) -> bytes:
    """Inflate `blob` that was deflated with the preset dictionary `zdict`."""
    inflater = zlib.decompressobj(zdict=zdict)
    return inflater.decompress(blob)
def decompress_object_with_puzzles(compressed_object_blob: bytes) -> bytes:
    """Strip the 2-byte big-endian version prefix and inflate the remainder
    with that version's dictionary.

    Raises CompressionVersionError for versions newer than we know about.
    """
    version = int.from_bytes(compressed_object_blob[0:2], "big")
    if version > len(ZDICT):
        raise CompressionVersionError(version)
    return decompress_with_zdict(compressed_object_blob[2:], zdict_for_version(version))
def compress_object_with_puzzles(object_bytes: bytes, version: int) -> bytes:
    """Deflate `object_bytes` with version's dictionary, prefixing the
    2-byte big-endian version number."""
    header = version.to_bytes(length=2, byteorder="big")
    body = compress_with_zdict(object_bytes, zdict_for_version(version))
    return header + body
def lowest_best_version(puzzle_list: List[bytes], max_version: int = len(ZDICT)) -> int:
    """Return the lowest compression version whose dictionaries cover `puzzle_list`.

    Versions are 1-based: version v concatenates ZDICT[0:v], so a puzzle
    found in ZDICT[i] requires at least version i + 1.  `max_version` caps
    the versions considered; the default considers them all.
    """
    highest_version = 1
    for mod in puzzle_list:
        for version_index, version_dict in enumerate(ZDICT):  # renamed from `dict` (shadowed builtin)
            # Bug fix: version numbers are version_index + 1.  The previous
            # check (`version_index > max_version`) was off by one and could
            # return max_version + 1 when a puzzle lived in ZDICT[max_version].
            if version_index + 1 > max_version:
                break
            if bytes(mod) in version_dict:
                highest_version = max(highest_version, version_index + 1)
    return highest_version
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/util/json_clvm_utils.py | flax/wallet/util/json_clvm_utils.py | from __future__ import annotations
from typing import Any
from flax.types.blockchain_format.program import Program
def json_to_flaxlisp(json_data: Any) -> Any:
    """Recursively convert parsed JSON into a CLVM `Program`.

    Lists map to lists, objects map to lists of (key, value) pairs, and
    scalars pass straight through to `Program.to`.
    """
    list_for_flaxlisp = []
    if isinstance(json_data, list):
        for value in json_data:
            list_for_flaxlisp.append(json_to_flaxlisp(value))
    elif isinstance(json_data, dict):
        # Bug fix: iterating a dict directly yields only the keys; we need
        # (key, value) pairs, so iterate .items().  The old
        # `for key, value in json_data:` raised (or silently mangled data)
        # for any real object.
        for key, value in json_data.items():
            list_for_flaxlisp.append((key, json_to_flaxlisp(value)))
    else:
        # Scalar (str / int / bool / None): hand it to Program.to as-is.
        list_for_flaxlisp = json_data
    return Program.to(list_for_flaxlisp)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/trading/trade_store.py | flax/wallet/trading/trade_store.py | import logging
from time import perf_counter
from typing import List, Optional, Tuple, Set
import aiosqlite
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.util.db_wrapper import DBWrapper2
from flax.util.errors import Err
from flax.util.ints import uint8, uint32
from flax.wallet.trade_record import TradeRecord
from flax.wallet.trading.trade_status import TradeStatus
async def migrate_coin_of_interest(log: logging.Logger, db: aiosqlite.Connection) -> None:
    """Backfill the coin_of_interest_to_trade_record lookup table from the
    serialized records already stored in trade_records."""
    log.info("Beginning migration of coin_of_interest_to_trade_record lookup table")
    started = perf_counter()

    rows = await db.execute_fetchall("SELECT trade_record, trade_id from trade_records")
    inserts: List[Tuple[bytes32, bytes32]] = []
    for row in rows:
        record: TradeRecord = TradeRecord.from_bytes(row[0])
        inserts.extend((coin.name(), record.trade_id) for coin in record.coins_of_interest)

    if not inserts:
        # no trades to migrate
        return

    try:
        await db.executemany(
            "INSERT INTO coin_of_interest_to_trade_record " "(coin_id, trade_id) " "VALUES(?, ?)", inserts
        )
    except (aiosqlite.OperationalError, aiosqlite.IntegrityError):
        log.exception("Failed to migrate coin_of_interest lookup table for trade_records")
        raise

    finished = perf_counter()
    log.info(
        f"Completed coin_of_interest lookup table migration of {len(inserts)} "
        f"records in {finished - started} seconds"
    )
async def migrate_is_my_offer(log: logging.Logger, db_connection: aiosqlite.Connection) -> None:
    """
    Migrate the is_my_offer property contained in the serialized TradeRecord (trade_record column)
    to the is_my_offer column in the trade_records table.
    """
    log.info("Beginning migration of is_my_offer property in trade_records")
    started = perf_counter()

    cursor = await db_connection.execute("SELECT trade_record, trade_id from trade_records")
    rows = await cursor.fetchall()
    await cursor.close()

    # One (flag, trade_id) pair per stored record.
    updates: List[Tuple[int, str]] = [
        (1 if TradeRecord.from_bytes(row[0]).is_my_offer else 0, row[1]) for row in rows
    ]

    try:
        await db_connection.executemany("UPDATE trade_records SET is_my_offer=? WHERE trade_id=?", updates)
    except (aiosqlite.OperationalError, aiosqlite.IntegrityError):
        log.exception("Failed to migrate is_my_offer property in trade_records")
        raise

    finished = perf_counter()
    log.info(f"Completed migration of {len(updates)} records in {finished - started} seconds")
class TradeStore:
"""
TradeStore stores trading history.
"""
cache_size: uint32
db_wrapper: DBWrapper2
log: logging.Logger
    @classmethod
    async def create(
        cls, db_wrapper: DBWrapper2, cache_size: uint32 = uint32(600000), name: Optional[str] = None
    ) -> "TradeStore":
        """Build a TradeStore: ensure tables and indexes exist, then run any
        pending one-time migrations.

        db_wrapper: shared sqlite wrapper used for all queries.
        cache_size: stored on the instance (not used directly here).
        name: optional logger name; defaults to this module's logger.
        """
        self = cls()

        if name:
            self.log = logging.getLogger(name)
        else:
            self.log = logging.getLogger(__name__)

        self.cache_size = cache_size
        self.db_wrapper = db_wrapper

        async with self.db_wrapper.writer_maybe_transaction() as conn:
            await conn.execute(
                (
                    "CREATE TABLE IF NOT EXISTS trade_records("
                    " trade_record blob,"
                    " trade_id text PRIMARY KEY,"
                    " status int,"
                    " confirmed_at_index int,"
                    " created_at_time bigint,"
                    " sent int,"
                    " is_my_offer tinyint)"
                )
            )
            await conn.execute(
                ("CREATE TABLE IF NOT EXISTS coin_of_interest_to_trade_record(" " trade_id blob," " coin_id blob)")
            )
            await conn.execute(
                "CREATE INDEX IF NOT EXISTS coin_to_trade_record_index on " "coin_of_interest_to_trade_record(trade_id)"
            )

            # coin of interest migration check: migrate only when trade
            # records exist but the lookup table has not been populated yet.
            trades_not_emtpy = await (await conn.execute("SELECT trade_id FROM trade_records LIMIT 1")).fetchone()
            coins_emtpy = not await (
                await conn.execute("SELECT coin_id FROM coin_of_interest_to_trade_record LIMIT 1")
            ).fetchone()
            # run coin of interest migration if we find any existing rows in trade records
            if trades_not_emtpy and coins_emtpy:
                migrate_coin_of_interest_col = True
            else:
                migrate_coin_of_interest_col = False

            # Attempt to add the is_my_offer column. If successful, migrate is_my_offer to the new column.
            # ALTER TABLE failing is how we detect the column already exists.
            needs_is_my_offer_migration: bool = False
            try:
                await conn.execute("ALTER TABLE trade_records ADD COLUMN is_my_offer tinyint")
                needs_is_my_offer_migration = True
            except aiosqlite.OperationalError:
                pass  # ignore what is likely Duplicate column error

            await conn.execute("CREATE INDEX IF NOT EXISTS trade_confirmed_index on trade_records(confirmed_at_index)")
            await conn.execute("CREATE INDEX IF NOT EXISTS trade_status on trade_records(status)")
            await conn.execute("CREATE INDEX IF NOT EXISTS trade_id on trade_records(trade_id)")

            # Run migrations inside the same transaction as the DDL above.
            if needs_is_my_offer_migration:
                await migrate_is_my_offer(self.log, conn)
            if migrate_coin_of_interest_col:
                await migrate_coin_of_interest(self.log, conn)

        return self
async def add_trade_record(self, record: TradeRecord) -> None:
"""
Store TradeRecord into DB
"""
async with self.db_wrapper.writer_maybe_transaction() as conn:
cursor = await conn.execute(
"INSERT OR REPLACE INTO trade_records "
"(trade_record, trade_id, status, confirmed_at_index, created_at_time, sent, is_my_offer) "
"VALUES(?, ?, ?, ?, ?, ?, ?)",
(
bytes(record),
record.trade_id.hex(),
record.status,
record.confirmed_at_index,
record.created_at_time,
record.sent,
record.is_my_offer,
),
)
await cursor.close()
# remove all current coin ids
await conn.execute("DELETE FROM coin_of_interest_to_trade_record WHERE trade_id=?", (record.trade_id,))
# now recreate them all
inserts: List[Tuple[bytes32, bytes32]] = []
for coin in record.coins_of_interest:
inserts.append((coin.name(), record.trade_id))
await conn.executemany(
"INSERT INTO coin_of_interest_to_trade_record (coin_id, trade_id) VALUES(?, ?)", inserts
)
async def set_status(self, trade_id: bytes32, status: TradeStatus, index: uint32 = uint32(0)) -> None:
"""
Updates the status of the trade
"""
current: Optional[TradeRecord] = await self.get_trade_record(trade_id)
if current is None:
return
confirmed_at_index = current.confirmed_at_index
if index != 0:
confirmed_at_index = index
tx: TradeRecord = TradeRecord(
confirmed_at_index=confirmed_at_index,
accepted_at_time=current.accepted_at_time,
created_at_time=current.created_at_time,
is_my_offer=current.is_my_offer,
sent=current.sent,
offer=current.offer,
taken_offer=current.taken_offer,
coins_of_interest=current.coins_of_interest,
trade_id=current.trade_id,
status=uint32(status.value),
sent_to=current.sent_to,
)
await self.add_trade_record(tx)
async def increment_sent(
self, id: bytes32, name: str, send_status: MempoolInclusionStatus, err: Optional[Err]
) -> bool:
"""
Updates trade sent count (Full Node has received spend_bundle and sent ack).
"""
current: Optional[TradeRecord] = await self.get_trade_record(id)
if current is None:
return False
sent_to = current.sent_to.copy()
err_str = err.name if err is not None else None
append_data = (name, uint8(send_status.value), err_str)
# Don't increment count if it's already sent to this peer
if append_data in sent_to:
return False
sent_to.append(append_data)
tx: TradeRecord = TradeRecord(
confirmed_at_index=current.confirmed_at_index,
accepted_at_time=current.accepted_at_time,
created_at_time=current.created_at_time,
is_my_offer=current.is_my_offer,
sent=uint32(current.sent + 1),
offer=current.offer,
taken_offer=current.taken_offer,
coins_of_interest=current.coins_of_interest,
trade_id=current.trade_id,
status=current.status,
sent_to=sent_to,
)
await self.add_trade_record(tx)
return True
async def get_trades_count(self) -> Tuple[int, int, int]:
    """
    Returns the number of trades in the database broken down by is_my_offer status:
    (total, my_offers, taken_offers).
    """
    query = (
        "SELECT COUNT(*) AS total, "
        "SUM(CASE WHEN is_my_offer=1 THEN 1 ELSE 0 END) AS my_offers, "
        "SUM(CASE WHEN is_my_offer=0 THEN 1 ELSE 0 END) AS taken_offers "
        "FROM trade_records"
    )
    async with self.db_wrapper.reader_no_transaction() as conn:
        cursor = await conn.execute(query)
        row = await cursor.fetchone()
        await cursor.close()
    if row is None:
        return 0, 0, 0
    # SUM() yields NULL (None) on an empty table, so default each column to 0.
    total, my_offers_count, taken_offers_count = (int(col) if col is not None else 0 for col in row)
    return total, my_offers_count, taken_offers_count
async def get_trade_record(self, trade_id: bytes32) -> Optional[TradeRecord]:
    """
    Checks DB for TradeRecord with id: id and returns it.

    Returns None when no trade with that id exists.
    """
    async with self.db_wrapper.reader_no_transaction() as conn:
        cursor = await conn.execute("SELECT trade_record from trade_records WHERE trade_id=?", (trade_id.hex(),))
        row = await cursor.fetchone()
        await cursor.close()
    return None if row is None else TradeRecord.from_bytes(row[0])
async def get_trade_record_with_status(self, status: TradeStatus) -> List[TradeRecord]:
    """
    Returns all stored trades that currently have the given status.
    """
    async with self.db_wrapper.reader_no_transaction() as conn:
        # status is stored as the raw integer value of the TradeStatus enum
        cursor = await conn.execute("SELECT trade_record from trade_records WHERE status=?", (status.value,))
        rows = await cursor.fetchall()
        await cursor.close()
    records = []
    for row in rows:
        record = TradeRecord.from_bytes(row[0])
        records.append(record)
    return records
async def get_coin_ids_of_interest_with_trade_statuses(self, trade_statuses: List[TradeStatus]) -> Set[bytes32]:
    """
    Returns the distinct ids of all coins of interest that belong to trades in
    any of the given statuses.
    """
    async with self.db_wrapper.reader_no_transaction() as conn:
        # trade_records.trade_id is stored as lowercase hex text (see get_trade_record,
        # which binds trade_id.hex()), while the link table stores raw bytes —
        # hence the LOWER(hex(...)) conversion in the join condition.
        # The %s only injects "?" placeholders (one per status); values are bound safely.
        rows = await conn.execute_fetchall(
            "SELECT distinct cl.coin_id "
            "from coin_of_interest_to_trade_record cl, trade_records t "
            "WHERE "
            "t.status in (%s) "
            "AND LOWER(hex(cl.trade_id)) = t.trade_id " % (",".join("?" * len(trade_statuses)),),
            [x.value for x in trade_statuses],
        )
    return {bytes32(row[0]) for row in rows}
async def get_not_sent(self) -> List[TradeRecord]:
    """
    Returns the list of trades that have not been received by full node yet.
    """
    async with self.db_wrapper.reader_no_transaction() as conn:
        # Only unconfirmed trades that have been broadcast fewer than 4 times.
        cursor = await conn.execute("SELECT trade_record from trade_records WHERE sent<? and confirmed=?", (4, 0))
        pending_rows = await cursor.fetchall()
        await cursor.close()
    return [TradeRecord.from_bytes(row[0]) for row in pending_rows]
async def get_all_unconfirmed(self) -> List[TradeRecord]:
    """
    Returns the list of all trades that have not yet been confirmed.
    """
    async with self.db_wrapper.reader_no_transaction() as conn:
        cursor = await conn.execute("SELECT trade_record from trade_records WHERE confirmed=?", (0,))
        unconfirmed_rows = await cursor.fetchall()
        await cursor.close()
    return [TradeRecord.from_bytes(row[0]) for row in unconfirmed_rows]
async def get_all_trades(self) -> List[TradeRecord]:
    """
    Returns all stored trades.
    """
    async with self.db_wrapper.reader_no_transaction() as conn:
        cursor = await conn.execute("SELECT trade_record from trade_records")
        all_rows = await cursor.fetchall()
        await cursor.close()
    return [TradeRecord.from_bytes(serialized[0]) for serialized in all_rows]
async def get_trades_between(
    self,
    start: int,
    end: int,
    *,
    sort_key: Optional[str] = None,
    reverse: bool = False,
    exclude_my_offers: bool = False,
    exclude_taken_offers: bool = False,
    include_completed: bool = False,
) -> List[TradeRecord]:
    """
    Return a list of trades sorted by a key and between a start and end index.

    sort_key: None / "CONFIRMED_AT_HEIGHT" sorts by confirmation height;
    "RELEVANCE" groups pending statuses before completed ones. Any other value
    raises ValueError. ``start``/``end`` are list slice indices (OFFSET/LIMIT).
    """
    if start < 0:
        raise ValueError("start must be >= 0")
    if start > end:
        raise ValueError("start must be less than or equal to end")
    # If excluding everything, return an empty list
    if exclude_my_offers and exclude_taken_offers:
        return []
    offset = start
    limit = end - start
    where_status_clause: Optional[str] = None
    order_by_clause: Optional[str] = None
    if not include_completed:
        # Construct a WHERE clause that only looks at active/pending statuses
        where_status_clause = (
            f"(status={TradeStatus.PENDING_ACCEPT.value} OR "
            f"status={TradeStatus.PENDING_CONFIRM.value} OR "
            f"status={TradeStatus.PENDING_CANCEL.value}) "
        )
    # Create an ORDER BY clause according to the desired sort type
    if sort_key is None or sort_key == "CONFIRMED_AT_HEIGHT":
        order_by_clause = (
            f"ORDER BY confirmed_at_index {'ASC' if reverse else 'DESC'}, "
            f"trade_id {'DESC' if reverse else 'ASC'} "
        )
    elif sort_key == "RELEVANCE":
        # Custom sort order for statuses to separate out pending/completed offers
        ordered_statuses = [
            # Pending statuses are grouped together and ordered by creation date/confirmation height
            (TradeStatus.PENDING_ACCEPT.value, 1 if reverse else 0),
            (TradeStatus.PENDING_CONFIRM.value, 1 if reverse else 0),
            (TradeStatus.PENDING_CANCEL.value, 1 if reverse else 0),
            # Cancelled/Confirmed/Failed are grouped together and ordered by creation date/confirmation height
            (TradeStatus.CANCELLED.value, 0 if reverse else 1),
            (TradeStatus.CONFIRMED.value, 0 if reverse else 1),
            (TradeStatus.FAILED.value, 0 if reverse else 1),
        ]
        if reverse:
            ordered_statuses.reverse()
        # Create the "WHEN {status} THEN {index}" cases for the "CASE status" statement
        ordered_status_clause = " ".join(map(lambda x: f"WHEN {x[0]} THEN {x[1]}", ordered_statuses))
        ordered_status_clause = f"CASE status {ordered_status_clause} END, "
        order_by_clause = (
            f"ORDER BY "
            f"{ordered_status_clause} "
            f"created_at_time {'ASC' if reverse else 'DESC'}, "
            f"confirmed_at_index {'ASC' if reverse else 'DESC'}, "
            f"trade_id {'DESC' if reverse else 'ASC'} "
        )
    else:
        raise ValueError(f"No known sort {sort_key}")
    # Fixed: the original re-assigned the identical base query in an else branch;
    # build the base once and branch only on the WHERE clauses.
    query = "SELECT trade_record FROM trade_records "
    args: List[int] = []
    if exclude_my_offers or exclude_taken_offers:
        # We check if exclude_my_offers == exclude_taken_offers earlier and return [] if so
        is_my_offer_val = 0 if exclude_my_offers else 1
        args.append(is_my_offer_val)
        query += "WHERE is_my_offer=? "
        # Include the additional WHERE status clause if we're filtering out certain statuses
        if where_status_clause is not None:
            query += "AND " + where_status_clause
    elif where_status_clause is not None:
        query += "WHERE " + where_status_clause
    # Include the ORDER BY clause
    if order_by_clause is not None:
        query += order_by_clause
    # Include the LIMIT clause
    query += "LIMIT ? OFFSET ?"
    args.extend([limit, offset])
    async with self.db_wrapper.reader_no_transaction() as conn:
        cursor = await conn.execute(query, tuple(args))
        rows = await cursor.fetchall()
        await cursor.close()
    return [TradeRecord.from_bytes(row[0]) for row in rows]
async def get_trades_above(self, height: uint32) -> List[TradeRecord]:
    """Return every trade confirmed strictly above the given block height."""
    async with self.db_wrapper.reader_no_transaction() as conn:
        cursor = await conn.execute("SELECT trade_record from trade_records WHERE confirmed_at_index>?", (height,))
        matching_rows = await cursor.fetchall()
        await cursor.close()
    return [TradeRecord.from_bytes(row[0]) for row in matching_rows]
async def rollback_to_block(self, block_index: int) -> None:
    """Delete every trade confirmed above ``block_index`` (blockchain reorg handling)."""
    async with self.db_wrapper.writer_maybe_transaction() as conn:
        # Delete from storage
        await (
            await conn.execute("DELETE FROM trade_records WHERE confirmed_at_index>?", (block_index,))
        ).close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/trading/__init__.py | flax/wallet/trading/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/trading/trade_status.py | flax/wallet/trading/trade_status.py | from __future__ import annotations
from enum import Enum
class TradeStatus(Enum):
    """Lifecycle states of a wallet trade/offer.

    The integer values are persisted in the wallet's trade database and are
    interpolated into SQL queries, so they must not be renumbered.
    """

    PENDING_ACCEPT = 0
    PENDING_CONFIRM = 1
    PENDING_CANCEL = 2
    CANCELLED = 3
    CONFIRMED = 4
    FAILED = 5
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/trading/offer.py | flax/wallet/trading/offer.py | from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict, List, Optional, Set, Tuple, Union, BinaryIO
from blspy import G2Element
from clvm_tools.binutils import disassemble
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.coin import Coin, coin_as_list
from flax.types.blockchain_format.program import Program, INFINITE_COST
from flax.types.announcement import Announcement
from flax.types.coin_spend import CoinSpend
from flax.types.spend_bundle import SpendBundle
from flax.util.bech32m import bech32_decode, bech32_encode, convertbits
from flax.util.ints import uint64
from flax.wallet.outer_puzzles import (
construct_puzzle,
create_asset_id,
match_puzzle,
solve_puzzle,
get_inner_puzzle,
get_inner_solution,
)
from flax.wallet.payment import Payment
from flax.wallet.puzzle_drivers import PuzzleInfo, Solver
from flax.wallet.puzzles.load_clvm import load_clvm_maybe_recompile
from flax.wallet.util.puzzle_compression import (
compress_object_with_puzzles,
decompress_object_with_puzzles,
lowest_best_version,
)
from flax.wallet.uncurried_puzzle import UncurriedPuzzle, uncurry_puzzle
OFFER_MOD_OLD = load_clvm_maybe_recompile("settlement_payments_old.clvm")
# NOTE(review): OFFER_MOD also loads the *old* settlement payments puzzle, so
# OFFER_MOD == OFFER_MOD_OLD and OFFER_MOD_HASH == OFFER_MOD_OLD_HASH, making the
# old/new branches elsewhere in this module equivalent. Upstream loads
# "settlement_payments.clvm" here — confirm this is intentional for this fork
# before changing it (it affects offer compatibility).
OFFER_MOD = load_clvm_maybe_recompile("settlement_payments_old.clvm")
OFFER_MOD_OLD_HASH = OFFER_MOD_OLD.get_tree_hash()
OFFER_MOD_HASH = OFFER_MOD.get_tree_hash()
# 32 zero bytes: used as the parent id of dummy coins and the default payment nonce.
ZERO_32 = bytes32([0] * 32)
def detect_dependent_coin(
    names: List[bytes32], deps: Dict[bytes32, List[bytes32]], announcement_dict: Dict[bytes32, List[bytes32]]
) -> Optional[Tuple[bytes32, bytes32]]:
    """Find the first coin in ``names`` that depends on an announcement made by
    another coin in the same bundle.

    Returns ``(dependent, provider)`` for the first such pair, or None when no
    coin depends on a sibling.
    """
    for name in names:
        for needed_announcement in deps[name]:
            # Which other coin makes the announcement this one asserts?
            providers = [
                maker
                for maker, made in announcement_dict.items()
                if needed_announcement in made and maker != name
            ]
            if providers:
                # We found one, now remove it and anything that depends on it (except the "provider")
                return name, providers[0]
    return None
@dataclass(frozen=True)
class NotarizedPayment(Payment):
    # The nonce ties this payment to a specific set of offered coins.
    nonce: bytes32 = ZERO_32

    @classmethod
    def from_condition_and_nonce(cls, condition: Program, nonce: bytes32) -> "NotarizedPayment":
        """Parse a (puzzle_hash amount memos) condition body and attach ``nonce``."""
        # Payment.from_condition expects a full CREATE_COIN condition, so prepend the opcode.
        full_condition: Program = Program.to((51, condition))
        parsed = Payment.from_condition(full_condition)
        puzzle_hash, amount, memos = tuple(parsed.as_condition_args())
        return cls(puzzle_hash, amount, memos, nonce)
@dataclass(frozen=True)
class Offer:
    """A (possibly partial) trade: a SpendBundle containing the offered coins'
    spends, plus notarized payments describing what is requested in return.

    Serialized form round-trips through ``to_spend_bundle``/``from_spend_bundle``,
    where requested payments are encoded as dummy coin spends with a ZERO_32 parent.
    """

    requested_payments: Dict[
        Optional[bytes32], List[NotarizedPayment]
    ]  # The key is the asset id of the asset being requested (None means XFX)
    bundle: SpendBundle
    driver_dict: Dict[bytes32, PuzzleInfo]  # asset_id -> asset driver

    @staticmethod
    def ph() -> bytes32:
        """Return the settlement payments puzzle hash."""
        return OFFER_MOD_HASH

    @staticmethod
    def notarize_payments(
        requested_payments: Dict[Optional[bytes32], List[Payment]],  # `None` means you are requesting XFX
        coins: List[Coin],
    ) -> Dict[Optional[bytes32], List[NotarizedPayment]]:
        """Stamp every requested payment with a nonce derived from the offered coins."""
        # This sort should be reproducible in CLVM with `>s`
        sorted_coins: List[Coin] = sorted(coins, key=Coin.name)
        sorted_coin_list: List[List[Union[bytes32, uint64]]] = [coin_as_list(c) for c in sorted_coins]
        nonce: bytes32 = Program.to(sorted_coin_list).get_tree_hash()

        notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = {}
        for asset_id, payments in requested_payments.items():
            notarized_payments[asset_id] = []
            for p in payments:
                puzzle_hash, amount, memos = tuple(p.as_condition_args())
                notarized_payments[asset_id].append(NotarizedPayment(puzzle_hash, amount, memos, nonce))
        return notarized_payments

    # The announcements returned from this function must be asserted in whatever spend bundle is created by the wallet
    @staticmethod
    def calculate_announcements(
        notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]], driver_dict: Dict[bytes32, PuzzleInfo]
    ) -> List[Announcement]:
        """Compute the puzzle announcements the settlement coins will make for each payment group."""
        announcements: List[Announcement] = []
        for asset_id, payments in notarized_payments.items():
            if asset_id is not None:
                if asset_id not in driver_dict:
                    raise ValueError("Cannot calculate announcements without driver of requested item")
                settlement_ph: bytes32 = construct_puzzle(driver_dict[asset_id], OFFER_MOD).get_tree_hash()
            else:
                settlement_ph = OFFER_MOD_HASH
            # The announcement message commits to the nonce and the full payment list
            msg: bytes32 = Program.to((payments[0].nonce, [p.as_condition_args() for p in payments])).get_tree_hash()
            announcements.append(Announcement(settlement_ph, msg))
        return announcements

    def __post_init__(self) -> None:
        # Verify that there are no duplicate payments
        for payments in self.requested_payments.values():
            payment_programs: List[bytes32] = [p.name() for p in payments]
            if len(set(payment_programs)) != len(payment_programs):
                raise ValueError("Bundle has duplicate requested payments")
        # Verify we have a type for every kind of asset
        for asset_id in self.requested_payments:
            if asset_id is not None and asset_id not in self.driver_dict:
                raise ValueError("Offer does not have enough driver information about the requested payments")

    def additions(self) -> List[Coin]:
        """All coins created by the bundle's spends; spends whose solutions fail to run are skipped."""
        final_list: List[Coin] = []
        for cs in self.bundle.coin_spends:
            try:
                final_list.extend(cs.additions())
            except Exception:
                pass
        return final_list

    def removals(self) -> List[Coin]:
        """All coins spent by the bundle."""
        return self.bundle.removals()

    def incomplete_spends(self) -> List[CoinSpend]:
        """Spends whose additions cannot currently be computed (their solutions fail to run)."""
        final_list: List[CoinSpend] = []
        for cs in self.bundle.coin_spends:
            try:
                cs.additions()
            except Exception:
                final_list.append(cs)
        return final_list

    # This method does not get every coin that is being offered, only the `settlement_payment` children
    # It's also a little heuristic, but it should get most things
    def get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]:
        offered_coins: Dict[Optional[bytes32], List[Coin]] = {}
        for parent_spend in self.bundle.coin_spends:
            coins_for_this_spend: List[Coin] = []
            parent_puzzle: UncurriedPuzzle = uncurry_puzzle(parent_spend.puzzle_reveal.to_program())
            parent_solution: Program = parent_spend.solution.to_program()
            additions: List[Coin] = [a for a in parent_spend.additions() if a not in self.bundle.removals()]
            puzzle_driver = match_puzzle(parent_puzzle)
            if puzzle_driver is not None:
                asset_id = create_asset_id(puzzle_driver)
                inner_puzzle: Optional[Program] = get_inner_puzzle(puzzle_driver, parent_puzzle)
                inner_solution: Optional[Program] = get_inner_solution(puzzle_driver, parent_solution)
                assert inner_puzzle is not None and inner_solution is not None
                conditions: Program = inner_puzzle.run(inner_solution)
                matching_spend_additions: List[Coin] = []  # coins that match offered amount and are sent to offer ph.
                for condition in conditions.as_iter():
                    # 51 == CREATE_COIN; look for children paid to a settlement puzzle hash
                    if condition.first() == 51 and condition.rest().first() in [OFFER_MOD_HASH, OFFER_MOD_OLD_HASH]:
                        matching_spend_additions.extend(
                            [a for a in additions if a.amount == condition.rest().rest().first().as_int()]
                        )
                if len(matching_spend_additions) == 1:
                    coins_for_this_spend.append(matching_spend_additions[0])
                else:
                    # Amount alone was ambiguous; disambiguate by the wrapped settlement puzzle hash
                    additions_w_amount_and_puzhash: List[Coin] = [
                        a
                        for a in matching_spend_additions
                        if a.puzzle_hash
                        in [
                            construct_puzzle(puzzle_driver, OFFER_MOD_OLD_HASH).get_tree_hash_precalc(  # type: ignore
                                OFFER_MOD_OLD_HASH
                            ),
                            construct_puzzle(puzzle_driver, OFFER_MOD_HASH).get_tree_hash_precalc(  # type: ignore
                                OFFER_MOD_HASH
                            ),
                        ]
                    ]
                    if len(additions_w_amount_and_puzhash) == 1:
                        coins_for_this_spend.append(additions_w_amount_and_puzhash[0])
            else:
                asset_id = None
                coins_for_this_spend.extend(
                    [a for a in additions if a.puzzle_hash in [OFFER_MOD_HASH, OFFER_MOD_OLD_HASH]]
                )
            if coins_for_this_spend != []:
                offered_coins.setdefault(asset_id, [])
                offered_coins[asset_id].extend(coins_for_this_spend)
        return offered_coins

    def get_offered_amounts(self) -> Dict[Optional[bytes32], int]:
        """Total offered amount per asset id (None == XFX)."""
        offered_coins: Dict[Optional[bytes32], List[Coin]] = self.get_offered_coins()
        offered_amounts: Dict[Optional[bytes32], int] = {}
        for asset_id, coins in offered_coins.items():
            offered_amounts[asset_id] = uint64(sum([c.amount for c in coins]))
        return offered_amounts

    def get_requested_payments(self) -> Dict[Optional[bytes32], List[NotarizedPayment]]:
        """Accessor for the requested payments mapping."""
        return self.requested_payments

    def get_requested_amounts(self) -> Dict[Optional[bytes32], int]:
        """Total requested amount per asset id (None == XFX)."""
        requested_amounts: Dict[Optional[bytes32], int] = {}
        for asset_id, coins in self.get_requested_payments().items():
            requested_amounts[asset_id] = uint64(sum([c.amount for c in coins]))
        return requested_amounts

    def arbitrage(self) -> Dict[Optional[bytes32], int]:
        """
        Returns a dictionary of the type of each asset and amount that is involved in the trade
        With the amount being how much their offered amount within the offer
        exceeds/falls short of their requested amount.
        """
        offered_amounts: Dict[Optional[bytes32], int] = self.get_offered_amounts()
        requested_amounts: Dict[Optional[bytes32], int] = self.get_requested_amounts()
        arbitrage_dict: Dict[Optional[bytes32], int] = {}
        for asset_id in [*requested_amounts.keys(), *offered_amounts.keys()]:
            arbitrage_dict[asset_id] = offered_amounts.get(asset_id, 0) - requested_amounts.get(asset_id, 0)
        return arbitrage_dict

    # This is a method mostly for the UI that creates a JSON summary of the offer
    def summary(self) -> Tuple[Dict[str, int], Dict[str, int], Dict[str, Dict[str, Any]]]:
        offered_amounts: Dict[Optional[bytes32], int] = self.get_offered_amounts()
        requested_amounts: Dict[Optional[bytes32], int] = self.get_requested_amounts()

        def keys_to_strings(dic: Dict[Optional[bytes32], Any]) -> Dict[str, Any]:
            # None (the native asset) is rendered as "xfx"; everything else as hex
            new_dic: Dict[str, Any] = {}
            for key in dic:
                if key is None:
                    new_dic["xfx"] = dic[key]
                else:
                    new_dic[key.hex()] = dic[key]
            return new_dic

        driver_dict: Dict[str, Any] = {}
        for key, value in self.driver_dict.items():
            driver_dict[key.hex()] = value.info
        return keys_to_strings(offered_amounts), keys_to_strings(requested_amounts), driver_dict

    # Also mostly for the UI, returns a dictionary of assets and how much of them is pended for this offer
    # This method is also imperfect for sufficiently complex spends
    def get_pending_amounts(self) -> Dict[str, int]:
        all_additions: List[Coin] = self.additions()
        all_removals: List[Coin] = self.removals()
        non_ephemeral_removals: List[Coin] = list(filter(lambda c: c not in all_additions, all_removals))
        pending_dict: Dict[str, int] = {}
        # First we add up the amounts of all coins that share an ancestor with the offered coins (i.e. a primary coin)
        for asset_id, coins in self.get_offered_coins().items():
            name = "xfx" if asset_id is None else asset_id.hex()
            pending_dict[name] = 0
            for coin in coins:
                root_removal: Coin = self.get_root_removal(coin)
                for addition in filter(lambda c: c.parent_coin_info == root_removal.name(), all_additions):
                    pending_dict[name] += addition.amount
        # Then we gather anything else as unknown
        sum_of_additions_so_far: int = sum(pending_dict.values())
        unknown: int = sum([c.amount for c in non_ephemeral_removals]) - sum_of_additions_so_far
        if unknown > 0:
            pending_dict["unknown"] = unknown
        return pending_dict

    # This method returns all of the coins that are being used in the offer (without which it would be invalid)
    def get_involved_coins(self) -> List[Coin]:
        additions = self.additions()
        return list(filter(lambda c: c not in additions, self.removals()))

    # This returns the non-ephemeral removal that is an ancestor of the specified coin
    # This should maybe move to the SpendBundle object at some point
    def get_root_removal(self, coin: Coin) -> Coin:
        all_removals: Set[Coin] = set(self.removals())
        all_removal_ids: Set[bytes32] = {c.name() for c in all_removals}
        non_ephemeral_removals: Set[Coin] = {
            c for c in all_removals if c.parent_coin_info not in {r.name() for r in all_removals}
        }
        if coin.name() not in all_removal_ids and coin.parent_coin_info not in all_removal_ids:
            raise ValueError("The specified coin is not a coin in this bundle")
        # Walk up the parent chain until we reach a removal whose parent is outside the bundle
        while coin not in non_ephemeral_removals:
            coin = next(c for c in all_removals if c.name() == coin.parent_coin_info)
        return coin

    # This will only return coins that are ancestors of settlement payments
    def get_primary_coins(self) -> List[Coin]:
        primary_coins: Set[Coin] = set()
        for _, coins in self.get_offered_coins().items():
            for coin in coins:
                primary_coins.add(self.get_root_removal(coin))
        return list(primary_coins)

    # This returns the minimum coins that when spent will invalidate the rest of the bundle
    def get_cancellation_coins(self) -> List[Coin]:
        # First, we're going to gather:
        dependencies: Dict[bytes32, List[bytes32]] = {}  # all of the hashes that each coin depends on
        announcements: Dict[bytes32, List[bytes32]] = {}  # all of the hashes of the announcement that each coin makes
        coin_names: List[bytes32] = []  # The names of all the coins
        for spend in [cs for cs in self.bundle.coin_spends if cs.coin not in self.bundle.additions()]:
            name = bytes32(spend.coin.name())
            coin_names.append(name)
            dependencies[name] = []
            announcements[name] = []
            conditions: Program = spend.puzzle_reveal.run_with_cost(INFINITE_COST, spend.solution)[1]
            for condition in conditions.as_iter():
                if condition.first() == 60:  # create coin announcement
                    announcements[name].append(Announcement(name, condition.at("rf").as_python()).name())
                elif condition.first() == 61:  # assert coin announcement
                    dependencies[name].append(bytes32(condition.at("rf").as_python()))
        # We now enter a loop that is attempting to express the following logic:
        # "If I am depending on another coin in the same bundle, you may as well cancel that coin instead of me"
        # By the end of the loop, we should have filtered down the list of coin_names to include only those that will
        # cancel everything else
        while True:
            removed = detect_dependent_coin(coin_names, dependencies, announcements)
            if removed is None:
                break
            removed_coin, provider = removed
            removed_announcements: List[bytes32] = announcements[removed_coin]
            remove_these_keys: List[bytes32] = [removed_coin]
            while True:
                # Anything that relied on the removed coin's announcements (other than its provider) also goes
                for coin, deps in dependencies.items():
                    if set(deps) & set(removed_announcements) and coin != provider:
                        remove_these_keys.append(coin)
                removed_announcements = []
                for coin in remove_these_keys:
                    dependencies.pop(coin)
                    removed_announcements.extend(announcements.pop(coin))
                coin_names = [n for n in coin_names if n not in remove_these_keys]
                if removed_announcements == []:
                    break
                else:
                    remove_these_keys = []
        return [cs.coin for cs in self.bundle.coin_spends if cs.coin.name() in coin_names]

    @classmethod
    def aggregate(cls, offers: List[Offer]) -> Offer:
        """Combine several offers into one, merging payments and spend bundles."""
        total_requested_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = {}
        total_bundle = SpendBundle([], G2Element())
        total_driver_dict: Dict[bytes32, PuzzleInfo] = {}
        for offer in offers:
            # First check for any overlap in inputs
            total_inputs: Set[Coin] = {cs.coin for cs in total_bundle.coin_spends}
            offer_inputs: Set[Coin] = {cs.coin for cs in offer.bundle.coin_spends}
            if total_inputs & offer_inputs:
                raise ValueError("The aggregated offers overlap inputs")
            # Next, do the aggregation
            for asset_id, payments in offer.requested_payments.items():
                if asset_id in total_requested_payments:
                    total_requested_payments[asset_id].extend(payments)
                else:
                    total_requested_payments[asset_id] = payments
            for key, value in offer.driver_dict.items():
                if key in total_driver_dict and total_driver_dict[key] != value:
                    raise ValueError(f"The offers to aggregate disagree on the drivers for {key.hex()}")
            total_bundle = SpendBundle.aggregate([total_bundle, offer.bundle])
            total_driver_dict.update(offer.driver_dict)
        return cls(total_requested_payments, total_bundle, total_driver_dict)

    # Validity is defined by having enough funds within the offer to satisfy both sides
    def is_valid(self) -> bool:
        return all([value >= 0 for value in self.arbitrage().values()])

    # A "valid" spend means that this bundle can be pushed to the network and will succeed
    # This differs from the `to_spend_bundle` method which deliberately creates an invalid SpendBundle
    def to_valid_spend(self, arbitrage_ph: Optional[bytes32] = None) -> SpendBundle:
        if not self.is_valid():
            raise ValueError("Offer is currently incomplete")
        completion_spends: List[CoinSpend] = []
        all_offered_coins: Dict[Optional[bytes32], List[Coin]] = self.get_offered_coins()
        total_arbitrage_amount: Dict[Optional[bytes32], int] = self.arbitrage()
        for asset_id, payments in self.requested_payments.items():
            offered_coins: List[Coin] = all_offered_coins[asset_id]
            # Because of CAT supply laws, we must specify a place for the leftovers to go
            arbitrage_amount: int = total_arbitrage_amount[asset_id]
            all_payments: List[NotarizedPayment] = payments.copy()
            if arbitrage_amount > 0:
                assert arbitrage_amount is not None
                assert arbitrage_ph is not None
                all_payments.append(NotarizedPayment(arbitrage_ph, uint64(arbitrage_amount), []))
            # Some assets need to know about siblings so we need to collect all spends first to be able to use them
            coin_to_spend_dict: Dict[Coin, CoinSpend] = {}
            coin_to_solution_dict: Dict[Coin, Program] = {}
            for coin in offered_coins:
                parent_spend: CoinSpend = list(
                    filter(lambda cs: cs.coin.name() == coin.parent_coin_info, self.bundle.coin_spends)
                )[0]
                coin_to_spend_dict[coin] = parent_spend
                inner_solutions = []
                # Only the first settlement coin actually lists the payments; siblings get an empty solution
                if coin == offered_coins[0]:
                    nonces: List[bytes32] = [p.nonce for p in all_payments]
                    for nonce in list(dict.fromkeys(nonces)):  # dedup without messing with order
                        nonce_payments: List[NotarizedPayment] = list(filter(lambda p: p.nonce == nonce, all_payments))
                        inner_solutions.append((nonce, [np.as_condition_args() for np in nonce_payments]))
                coin_to_solution_dict[coin] = Program.to(inner_solutions)
            for coin in offered_coins:
                if asset_id:
                    # Pick the old or new settlement puzzle based on which hash this coin was paid to
                    if coin.puzzle_hash == construct_puzzle(
                        self.driver_dict[asset_id], OFFER_MOD_OLD_HASH  # type: ignore
                    ).get_tree_hash_precalc(OFFER_MOD_OLD_HASH):
                        offer_mod: Program = OFFER_MOD_OLD
                    else:
                        offer_mod = OFFER_MOD
                    # Build Solver-friendly string representations of the sibling settlement coins
                    siblings: str = "("
                    sibling_spends: str = "("
                    sibling_puzzles: str = "("
                    sibling_solutions: str = "("
                    disassembled_offer_mod: str = disassemble(offer_mod)  # type: ignore
                    for sibling_coin in offered_coins:
                        if sibling_coin != coin:
                            siblings += (
                                "0x"
                                + sibling_coin.parent_coin_info.hex()
                                + sibling_coin.puzzle_hash.hex()
                                + bytes(uint64(sibling_coin.amount)).hex()
                                + " "
                            )
                            sibling_spends += "0x" + bytes(coin_to_spend_dict[sibling_coin]).hex() + " "
                            sibling_puzzles += disassembled_offer_mod + " "
                            sibling_solutions += disassemble(coin_to_solution_dict[sibling_coin]) + " "  # type: ignore
                    siblings += ")"
                    sibling_spends += ")"
                    sibling_puzzles += ")"
                    sibling_solutions += ")"
                    solution: Program = solve_puzzle(
                        self.driver_dict[asset_id],
                        Solver(
                            {
                                "coin": "0x"
                                + coin.parent_coin_info.hex()
                                + coin.puzzle_hash.hex()
                                + bytes(uint64(coin.amount)).hex(),
                                "parent_spend": "0x" + bytes(coin_to_spend_dict[coin]).hex(),
                                "siblings": siblings,
                                "sibling_spends": sibling_spends,
                                "sibling_puzzles": sibling_puzzles,
                                "sibling_solutions": sibling_solutions,
                            }
                        ),
                        offer_mod,
                        Program.to(coin_to_solution_dict[coin]),
                    )
                else:
                    if coin.puzzle_hash == OFFER_MOD_OLD_HASH:
                        offer_mod = OFFER_MOD_OLD
                    else:
                        offer_mod = OFFER_MOD
                    solution = Program.to(coin_to_solution_dict[coin])
                completion_spends.append(
                    CoinSpend(
                        coin,
                        construct_puzzle(self.driver_dict[asset_id], offer_mod) if asset_id else offer_mod,
                        solution,
                    )
                )
        return SpendBundle.aggregate([SpendBundle(completion_spends, G2Element()), self.bundle])

    def to_spend_bundle(self) -> SpendBundle:
        # Before we serialize this as a SpendBundle, we need to serialize the `requested_payments` as dummy CoinSpends
        additional_coin_spends: List[CoinSpend] = []
        for asset_id, payments in self.requested_payments.items():
            puzzle_reveal: Program = construct_puzzle(self.driver_dict[asset_id], OFFER_MOD) if asset_id else OFFER_MOD
            inner_solutions = []
            nonces: List[bytes32] = [p.nonce for p in payments]
            for nonce in list(dict.fromkeys(nonces)):  # dedup without messing with order
                nonce_payments: List[NotarizedPayment] = list(filter(lambda p: p.nonce == nonce, payments))
                inner_solutions.append((nonce, [np.as_condition_args() for np in nonce_payments]))
            additional_coin_spends.append(
                CoinSpend(
                    Coin(
                        ZERO_32,  # the ZERO_32 parent marks these spends as dummies
                        puzzle_reveal.get_tree_hash(),
                        uint64(0),
                    ),
                    puzzle_reveal,
                    Program.to(inner_solutions),
                )
            )
        return SpendBundle.aggregate(
            [
                SpendBundle(additional_coin_spends, G2Element()),
                self.bundle,
            ]
        )

    @classmethod
    def from_spend_bundle(cls, bundle: SpendBundle) -> Offer:
        # Because of the `to_spend_bundle` method, we need to parse the dummy CoinSpends as `requested_payments`
        requested_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = {}
        driver_dict: Dict[bytes32, PuzzleInfo] = {}
        leftover_coin_spends: List[CoinSpend] = []
        for coin_spend in bundle.coin_spends:
            driver = match_puzzle(uncurry_puzzle(coin_spend.puzzle_reveal.to_program()))
            if driver is not None:
                asset_id = create_asset_id(driver)
                assert asset_id is not None
                driver_dict[asset_id] = driver
            else:
                asset_id = None
            if coin_spend.coin.parent_coin_info == ZERO_32:
                notarized_payments: List[NotarizedPayment] = []
                for payment_group in coin_spend.solution.to_program().as_iter():
                    nonce = bytes32(payment_group.first().as_python())
                    payment_args_list: List[Program] = payment_group.rest().as_iter()
                    notarized_payments.extend(
                        [NotarizedPayment.from_condition_and_nonce(condition, nonce) for condition in payment_args_list]
                    )
                requested_payments[asset_id] = notarized_payments
            else:
                leftover_coin_spends.append(coin_spend)
        return cls(requested_payments, SpendBundle(leftover_coin_spends, bundle.aggregated_signature), driver_dict)

    def name(self) -> bytes32:
        """Unique id of the offer: the hash of its serialized spend bundle form."""
        return self.to_spend_bundle().name()

    def compress(self, version: Optional[int] = None) -> bytes:
        """Serialize to bytes, compressing well-known puzzles by dictionary reference."""
        as_spend_bundle = self.to_spend_bundle()
        if version is None:
            mods: List[bytes] = [bytes(s.puzzle_reveal.to_program().uncurry()[0]) for s in as_spend_bundle.coin_spends]
            version = max(lowest_best_version(mods), 5)  # 5 is the version where OFFER_MOD lives
        return compress_object_with_puzzles(bytes(as_spend_bundle), version)

    @classmethod
    def from_compressed(cls, compressed_bytes: bytes) -> Offer:
        """Inverse of ``compress``."""
        return Offer.from_bytes(decompress_object_with_puzzles(compressed_bytes))

    @classmethod
    def try_offer_decompression(cls, offer_bytes: bytes) -> Offer:
        """Parse ``offer_bytes``, attempting decompression first and falling back to raw bytes."""
        try:
            return cls.from_compressed(offer_bytes)
        except TypeError:
            pass
        return cls.from_bytes(offer_bytes)

    def to_bech32(self, prefix: str = "offer", compression_version: Optional[int] = None) -> str:
        """Encode the compressed offer as a bech32 string (e.g. "offer1...")."""
        offer_bytes = self.compress(version=compression_version)
        encoded = bech32_encode(prefix, convertbits(list(offer_bytes), 8, 5))
        return encoded

    @classmethod
    def from_bech32(cls, offer_bech32: str) -> Offer:
        """Decode a bech32-encoded offer string produced by ``to_bech32``."""
        hrpgot, data = bech32_decode(offer_bech32, max_length=len(offer_bech32))
        if data is None:
            raise ValueError("Invalid Offer")
        decoded = convertbits(list(data), 5, 8, False)
        decoded_bytes = bytes(decoded)
        return cls.try_offer_decompression(decoded_bytes)

    # Methods to make this a valid Streamable member
    # We basically hijack the SpendBundle versions for most of it
    @classmethod
    def parse(cls, f: BinaryIO) -> Offer:
        parsed_bundle = SpendBundle.parse(f)
        return cls.from_bytes(bytes(parsed_bundle))

    def stream(self, f: BinaryIO) -> None:
        as_spend_bundle = SpendBundle.from_bytes(bytes(self))
        as_spend_bundle.stream(f)

    def __bytes__(self) -> bytes:
        return bytes(self.to_spend_bundle())

    @classmethod
    def from_bytes(cls, as_bytes: bytes) -> Offer:
        # Because of the __bytes__ method, we need to parse the dummy CoinSpends as `requested_payments`
        bundle = SpendBundle.from_bytes(as_bytes)
        return cls.from_spend_bundle(bundle)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/settings/default_settings.py | flax/wallet/settings/default_settings.py | from __future__ import annotations
from flax.wallet.settings.settings_objects import BackupInitialized
# Defaults for a brand-new wallet, matching BackupInitialized's field order:
# user_initialized=False (no UI selection yet), user_skipped=False,
# backup_info_imported=False, new_wallet=True (freshly created wallet).
default_backup_initialized = BackupInitialized(False, False, False, True)
# Default setting objects, keyed by the setting class's name.
default_settings = {BackupInitialized.__name__: default_backup_initialized}
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/settings/user_settings.py | flax/wallet/settings/user_settings.py | from __future__ import annotations
from typing import Any, Dict
from flax.wallet.key_val_store import KeyValStore
from flax.wallet.settings.default_settings import default_settings
from flax.wallet.settings.settings_objects import BackupInitialized
class UserSettings:
    """In-memory view of per-user wallet settings, backed by a KeyValStore.

    Settings are loaded once at creation and cached in ``self.settings``,
    keyed by each setting class's name; updates go through
    ``setting_updated`` so the store and the cache stay in sync.
    """

    # Cache of setting objects, keyed by setting class name.
    settings: Dict[str, Any]
    # Persistent backing store for the settings.
    basic_store: KeyValStore

    @staticmethod
    async def create(
        store: KeyValStore,
        name: str | None = None,  # accepted for interface compatibility; currently unused
    ) -> UserSettings:
        """Build a UserSettings bound to *store* and pre-load all settings."""
        self = UserSettings()
        self.basic_store = store
        self.settings = {}
        await self.load_store()
        return self

    def _keys(self) -> list[Any]:
        """Return every setting class this wallet tracks."""
        return [BackupInitialized]

    async def load_store(self) -> None:
        """Populate ``self.settings`` from the store, falling back to defaults."""
        for setting in self._keys():
            name = setting.__name__
            stored = await self.basic_store.get_object(name, BackupInitialized)
            if stored is None:
                # Nothing persisted yet for this setting; use the shipped default.
                stored = default_settings[name]
            assert stored is not None
            self.settings[name] = stored

    async def setting_updated(self, setting: Any) -> None:
        """Persist *setting* and refresh the in-memory cache."""
        name = setting.__class__.__name__
        await self.basic_store.set_object(name, setting)
        self.settings[name] = setting
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/settings/settings_objects.py | flax/wallet/settings/settings_objects.py | from __future__ import annotations
from dataclasses import dataclass
from flax.util.streamable import Streamable, streamable
@streamable
@dataclass(frozen=True)
class BackupInitialized(Streamable):
    """
    Stores user decision regarding import of backup info
    """

    # NOTE: the field order defines the streamable serialization layout —
    # do not reorder these fields.
    user_initialized: bool  # Stores if user made a selection in UI. (Skip vs Import backup)
    user_skipped: bool  # Stores if user decided to skip import of backup info
    backup_info_imported: bool  # Stores if backup info has been imported
    new_wallet: bool  # Stores if this wallet is newly created / not restored from backup
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/settings/__init__.py | flax/wallet/settings/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/flax/wallet/puzzles/p2_delegated_conditions.py | flax/wallet/puzzles/p2_delegated_conditions.py | """
Pay to delegated conditions
In this puzzle program, the solution must be a signed list of conditions, which
is returned literally.
"""
from __future__ import annotations
from flax.types.blockchain_format.program import Program
from .load_clvm import load_clvm_maybe_recompile
# Compiled CLVM program for the pay-to-delegated-conditions puzzle.
MOD = load_clvm_maybe_recompile("p2_delegated_conditions.clvm")
def puzzle_for_pk(public_key: Program) -> Program:
    """Return the p2_delegated_conditions puzzle curried with *public_key*."""
    curried_puzzle = MOD.curry(public_key)
    return curried_puzzle
def solution_for_conditions(conditions: Program) -> Program:
    """Build the puzzle solution: the condition list wrapped in a one-element list."""
    wrapped = [conditions]
    return conditions.to(wrapped)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.