repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/cpu_utilization.py | tools/cpu_utilization.py | from __future__ import annotations
import time
from dataclasses import dataclass
from subprocess import check_call
from typing import Dict, List
import click
import psutil
@dataclass(frozen=True)
class Counters:
user_time: int
system_time: int
@click.command()
@click.argument("pid", type=int, required=True)
@click.option(
"--output",
type=str,
default="cpu-usage.log",
help="the file to print CPU usage stats to",
)
@click.option(
"--threads",
is_flag=True,
default=False,
help="Also capture threads counters",
)
def main(pid: int, output: str, threads: bool) -> None:
process = psutil.Process(pid)
stats: Dict[int, Dict[int, Counters]] = {pid: {}}
timestamps: List[float] = []
try:
step = 0
while process.is_running():
timestamps.append(time.perf_counter())
ps = process.cpu_times()
stats[pid][step] = Counters(ps.user, ps.system)
for p in process.children(recursive=True):
try:
ps = p.cpu_times()
if p.pid not in stats:
stats[p.pid] = {}
stats[p.pid][step] = Counters(ps.user, ps.system)
except Exception:
pass
if threads:
for t in process.threads():
try:
if t.id not in stats:
stats[t.id] = {}
stats[t.id][step] = Counters(t.user_time, t.system_time)
except Exception:
pass
time.sleep(0.05)
step += 1
except psutil.NoSuchProcess:
pass
except KeyboardInterrupt:
pass
cols = sorted(stats.items())
start_time = timestamps[0]
with open(output, "w+") as out:
out.write("timestamp ")
for col_id, _ in cols:
out.write(f"{col_id:5d}-user {col_id:6d}-sys ")
out.write("\n")
for row, ts in enumerate(timestamps):
if row == 0:
continue
time_delta = ts - timestamps[row - 1]
out.write(f"{ts-start_time:10f} ")
for _, c in cols:
if row in c and (row - 1) in c:
out.write(f" {(c[row].user_time - c[row - 1].user_time)*100/time_delta:6.2f}% ")
out.write(f" {(c[row].system_time - c[row - 1].system_time)*100/time_delta:6.2f}% ")
else:
out.write(" 0.00% 0.00% ")
row += 1
out.write("\n")
with open("plot-cpu.gnuplot", "w+") as out:
out.write(
f"""
set term png small size 1500, {120*len(cols)}
set output "cpu.png"
set yrange [0:100]
unset xtics
set multiplot layout {len(cols)},1
"""
)
for idx, c2 in enumerate(cols):
if c2[0] == pid:
title = f"pid {c2[0]} (main)"
else:
title = f"pid {c2[0]}"
out.write(f'set ylabel "CPU (%)\\n{title}"\n')
if idx == len(cols) - 1:
out.write('set xlabel "time (s)"\n')
out.write(
f'plot "{output}" using 1:(${idx*2+2}+${idx*2+3}) title "User" with filledcurves y=0, '
f'"{output}" using 1:{idx*2+3} title "System" with filledcurves y=0\n'
)
print('running "gnuplot plot-cpu.gnuplot"')
check_call(["gnuplot", "plot-cpu.gnuplot"])
if __name__ == "__main__":
# pylint: disable = no-value-for-parameter
main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/test_constants.py | tools/test_constants.py | from __future__ import annotations
from flax.consensus.default_constants import DEFAULT_CONSTANTS
test_constants = DEFAULT_CONSTANTS.replace(
**{
"MIN_PLOT_SIZE": 18,
"MIN_BLOCKS_PER_CHALLENGE_BLOCK": 12,
"DIFFICULTY_STARTING": 2**9,
"DISCRIMINANT_SIZE_BITS": 16,
"SUB_EPOCH_BLOCKS": 170,
"WEIGHT_PROOF_THRESHOLD": 2,
"WEIGHT_PROOF_RECENT_BLOCKS": 380,
"DIFFICULTY_CONSTANT_FACTOR": 33554432,
"NUM_SPS_SUB_SLOT": 16, # Must be a power of 2
"MAX_SUB_SLOT_BLOCKS": 50,
"EPOCH_BLOCKS": 340,
"SUB_SLOT_ITERS_STARTING": 2**10, # Must be a multiple of 64
"NUMBER_ZERO_BITS_PLOT_FILTER": 1, # H(plot signature of the challenge) must start with these many zeroes
}
)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/manage_clvm.py | tools/manage_clvm.py | from __future__ import annotations
import dataclasses
import os
import pathlib
import sys
import tempfile
import traceback
import typing
import click
import typing_extensions
here = pathlib.Path(__file__).parent.resolve()
root = here.parent
# This is a work-around for fixing imports so they get the appropriate top level
# packages instead of those of the same name in the same directory as this program.
# This undoes the Python mis-feature meant to support 'scripts' that have not been
# installed by adding the script's directory to the import search path. This is why
# it is simpler to just have all code get installed and all things you run be
# accessible via entry points.
sys.path = [path for path in sys.path if path != os.fspath(here)]
from clvm_tools_rs import compile_clvm # noqa: E402
from flax.types.blockchain_format.program import SerializedProgram # noqa: E402
clvm_suffix = ".clvm"
hex_suffix = ".clvm.hex"
hash_suffix = ".clvm.hex.sha256tree"
all_suffixes = {"clvm": clvm_suffix, "hex": hex_suffix, "hash": hash_suffix}
# TODO: could be cli options
top_levels = {"flax"}
def generate_hash_bytes(hex_bytes: bytes) -> bytes:
cleaned_blob = bytes.fromhex(hex_bytes.decode("utf-8"))
serialize_program = SerializedProgram.from_bytes(cleaned_blob)
result = serialize_program.get_tree_hash().hex()
return (result + "\n").encode("utf-8")
@typing_extensions.final
@dataclasses.dataclass(frozen=True)
class ClvmPaths:
clvm: pathlib.Path
hex: pathlib.Path
hash: pathlib.Path
@classmethod
def from_clvm(cls, clvm: pathlib.Path) -> ClvmPaths:
return cls(
clvm=clvm,
hex=clvm.with_name(clvm.name[: -len(clvm_suffix)] + hex_suffix),
hash=clvm.with_name(clvm.name[: -len(clvm_suffix)] + hash_suffix),
)
@typing_extensions.final
@dataclasses.dataclass(frozen=True)
class ClvmBytes:
hex: bytes
hash: bytes
@classmethod
def from_clvm_paths(cls, paths: ClvmPaths) -> ClvmBytes:
return cls(
hex=paths.hex.read_bytes(),
hash=paths.hash.read_bytes(),
)
@classmethod
def from_hex_bytes(cls, hex_bytes: bytes) -> ClvmBytes:
return cls(
hex=hex_bytes,
hash=generate_hash_bytes(hex_bytes=hex_bytes),
)
# These files have the wrong extension for now so we'll just manually exclude them
excludes = {"condition_codes.clvm", "create-lock-puzzlehash.clvm"}
def find_stems(
top_levels: typing.Set[str],
suffixes: typing.Mapping[str, str] = all_suffixes,
) -> typing.Dict[str, typing.Set[pathlib.Path]]:
found_stems = {
name: {
path.with_name(path.name[: -len(suffix)])
for top_level in top_levels
for path in root.joinpath(top_level).rglob(f"**/*{suffix}")
}
for name, suffix in suffixes.items()
}
return found_stems
@click.group()
def main() -> None:
pass
@main.command()
def check() -> int:
used_excludes = set()
overall_fail = False
found_stems = find_stems(top_levels)
for name in ["hex", "hash"]:
found = found_stems[name]
suffix = all_suffixes[name]
extra = found - found_stems["clvm"]
print()
print(f"Extra {suffix} files:")
if len(extra) == 0:
print(" -")
else:
overall_fail = True
for stem in extra:
print(f" {stem.with_name(stem.name + suffix)}")
print()
print("Checking that all existing .clvm files compile to .clvm.hex that match existing caches:")
for stem_path in sorted(found_stems["clvm"]):
clvm_path = stem_path.with_name(stem_path.name + clvm_suffix)
if clvm_path.name in excludes:
used_excludes.add(clvm_path.name)
continue
file_fail = False
error = None
try:
reference_paths = ClvmPaths.from_clvm(clvm=clvm_path)
reference_bytes = ClvmBytes.from_clvm_paths(paths=reference_paths)
with tempfile.TemporaryDirectory() as temporary_directory:
generated_paths = ClvmPaths.from_clvm(
clvm=pathlib.Path(temporary_directory).joinpath(f"generated{clvm_suffix}")
)
compile_clvm(
input_path=os.fspath(reference_paths.clvm),
output_path=os.fspath(generated_paths.hex),
search_paths=[os.fspath(reference_paths.clvm.parent)],
)
generated_bytes = ClvmBytes.from_hex_bytes(hex_bytes=generated_paths.hex.read_bytes())
if generated_bytes != reference_bytes:
file_fail = True
error = f" reference: {reference_bytes!r}\n"
error += f" generated: {generated_bytes!r}"
except Exception:
file_fail = True
error = traceback.format_exc()
if file_fail:
print(f"FAIL : {clvm_path}")
if error is not None:
print(error)
else:
print(f" pass: {clvm_path}")
if file_fail:
overall_fail = True
unused_excludes = sorted(excludes - used_excludes)
if len(unused_excludes) > 0:
overall_fail = True
print()
print("Unused excludes:")
for exclude in unused_excludes:
print(f" {exclude}")
return 1 if overall_fail else 0
@main.command()
def build() -> int:
overall_fail = False
found_stems = find_stems(top_levels, suffixes={"clvm": clvm_suffix})
print(f"Building all existing {clvm_suffix} files to {hex_suffix}:")
for stem_path in sorted(found_stems["clvm"]):
clvm_path = stem_path.with_name(stem_path.name + clvm_suffix)
if clvm_path.name in excludes:
continue
file_fail = False
error = None
try:
reference_paths = ClvmPaths.from_clvm(clvm=clvm_path)
with tempfile.TemporaryDirectory() as temporary_directory:
generated_paths = ClvmPaths.from_clvm(
clvm=pathlib.Path(temporary_directory).joinpath(f"generated{clvm_suffix}")
)
compile_clvm(
input_path=os.fspath(reference_paths.clvm),
output_path=os.fspath(generated_paths.hex),
search_paths=[os.fspath(reference_paths.clvm.parent)],
)
generated_bytes = ClvmBytes.from_hex_bytes(hex_bytes=generated_paths.hex.read_bytes())
reference_paths.hex.write_bytes(generated_bytes.hex)
except Exception:
file_fail = True
error = traceback.format_exc()
if file_fail:
print(f"FAIL : {clvm_path}")
if error is not None:
print(error)
else:
print(f" built: {clvm_path}")
if file_fail:
overall_fail = True
return 1 if overall_fail else 0
sys.exit(main())
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/generate_chain.py | tools/generate_chain.py | from __future__ import annotations
import cProfile
import random
import sqlite3
import sys
import time
from contextlib import closing, contextmanager
from pathlib import Path
from typing import Iterator, List, Optional
import click
import zstd
from flax.simulator.block_tools import create_block_tools
from flax.types.blockchain_format.coin import Coin
from flax.types.spend_bundle import SpendBundle
from flax.util.flax_logging import initialize_logging
from flax.util.ints import uint32, uint64
from tests.util.keyring import TempKeyring
from tools.test_constants import test_constants
@contextmanager
def enable_profiler(profile: bool, counter: int) -> Iterator[None]:
if not profile:
yield
return
with cProfile.Profile() as pr:
yield
pr.create_stats()
pr.dump_stats(f"generate-chain-{counter}.profile")
@click.command()
@click.option("--length", type=int, default=None, required=False, help="the number of blocks to generate")
@click.option(
"--fill-rate",
type=int,
default=100,
required=False,
help="the transaction fill rate of blocks. Specified in percent of max block cost",
)
@click.option("--profile", is_flag=True, required=False, default=False, help="dump CPU profile at the end")
@click.option(
"--block-refs",
type=bool,
required=False,
default=True,
help="include a long list of block references in each transaction block",
)
@click.option(
"--output", type=str, required=False, default=None, help="the filename to write the resulting sqlite database to"
)
def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: Optional[str]) -> None:
if fill_rate < 0 or fill_rate > 100:
print("fill-rate must be within [0, 100]")
sys.exit(1)
if not length:
if block_refs:
# we won't have full reflist until after 512 transaction blocks
length = 1500
else:
# the cost of looking up coins will be deflated because there are so
# few, but a longer chain takes longer to make and test
length = 500
if length <= 0:
print("the output blockchain must have at least length 1")
sys.exit(1)
if output is None:
output = f"stress-test-blockchain-{length}-{fill_rate}{'-refs' if block_refs else ''}.sqlite"
root_path = Path("./test-chain").resolve()
root_path.mkdir(parents=True, exist_ok=True)
with TempKeyring() as keychain:
bt = create_block_tools(constants=test_constants, root_path=root_path, keychain=keychain)
initialize_logging(
"generate_chain", {"log_level": "DEBUG", "log_stdout": False, "log_syslog": False}, root_path=root_path
)
print(f"writing blockchain to {output}")
with closing(sqlite3.connect(output)) as db:
db.execute(
"CREATE TABLE full_blocks("
"header_hash blob PRIMARY KEY,"
"prev_hash blob,"
"height bigint,"
"in_main_chain tinyint,"
"block blob)"
)
wallet = bt.get_farmer_wallet_tool()
farmer_puzzlehash = wallet.get_new_puzzlehash()
pool_puzzlehash = wallet.get_new_puzzlehash()
transaction_blocks: List[uint32] = []
blocks = bt.get_consecutive_blocks(
3,
farmer_reward_puzzle_hash=farmer_puzzlehash,
pool_reward_puzzle_hash=pool_puzzlehash,
keep_going_until_tx_block=True,
genesis_timestamp=uint64(1234567890),
use_timestamp_residual=True,
)
unspent_coins: List[Coin] = []
for b in blocks:
for coin in b.get_included_reward_coins():
if coin.puzzle_hash in [farmer_puzzlehash, pool_puzzlehash]:
unspent_coins.append(coin)
db.execute(
"INSERT INTO full_blocks VALUES(?, ?, ?, ?, ?)",
(
b.header_hash,
b.prev_header_hash,
b.height,
1, # in_main_chain
zstd.compress(bytes(b)),
),
)
db.commit()
b = blocks[-1]
num_tx_per_block = int(1010 * fill_rate / 100)
while True:
with enable_profiler(profile, b.height):
start_time = time.monotonic()
new_coins: List[Coin] = []
spend_bundles: List[SpendBundle] = []
i = 0
for i in range(num_tx_per_block):
if unspent_coins == []:
break
c = unspent_coins.pop(random.randrange(len(unspent_coins)))
receiver = wallet.get_new_puzzlehash()
bundle = wallet.generate_signed_transaction(uint64(c.amount // 2), receiver, c)
new_coins.extend(bundle.additions())
spend_bundles.append(bundle)
block_references: List[uint32]
if block_refs:
block_references = random.sample(transaction_blocks, min(len(transaction_blocks), 512))
random.shuffle(block_references)
else:
block_references = []
farmer_puzzlehash = wallet.get_new_puzzlehash()
pool_puzzlehash = wallet.get_new_puzzlehash()
prev_num_blocks = len(blocks)
blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=farmer_puzzlehash,
pool_reward_puzzle_hash=pool_puzzlehash,
keep_going_until_tx_block=True,
transaction_data=SpendBundle.aggregate(spend_bundles),
previous_generator=block_references,
use_timestamp_residual=True,
)
prev_tx_block = b
prev_block = blocks[-2]
b = blocks[-1]
height = b.height
assert b.is_transaction_block()
transaction_blocks.append(height)
for bl in blocks[prev_num_blocks:]:
for coin in bl.get_included_reward_coins():
unspent_coins.append(coin)
unspent_coins.extend(new_coins)
if b.transactions_info:
actual_fill_rate = b.transactions_info.cost / test_constants.MAX_BLOCK_COST_CLVM
if b.transactions_info.cost > test_constants.MAX_BLOCK_COST_CLVM:
print(f"COST EXCEEDED: {b.transactions_info.cost}")
else:
actual_fill_rate = 0
end_time = time.monotonic()
if prev_tx_block is not None:
assert b.foliage_transaction_block
assert prev_tx_block.foliage_transaction_block
ts = b.foliage_transaction_block.timestamp - prev_tx_block.foliage_transaction_block.timestamp
else:
ts = 0
print(
f"height: {b.height} "
f"spends: {i+1} "
f"refs: {len(block_references)} "
f"fill_rate: {actual_fill_rate*100:.1f}% "
f"new coins: {len(new_coins)} "
f"unspent: {len(unspent_coins)} "
f"difficulty: {b.weight - prev_block.weight} "
f"timestamp: {ts} "
f"time: {end_time - start_time:0.2f}s "
f"tx-block-ratio: {len(transaction_blocks)*100/b.height:0.0f}% "
)
new_blocks = [
(
b.header_hash,
b.prev_header_hash,
b.height,
1, # in_main_chain
zstd.compress(bytes(b)),
)
for b in blocks[prev_num_blocks:]
]
db.executemany("INSERT INTO full_blocks VALUES(?, ?, ?, ?, ?)", new_blocks)
db.commit()
if height >= length:
break
if __name__ == "__main__":
# pylint: disable = no-value-for-parameter
main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/test_full_sync.py | tools/test_full_sync.py | #!/usr/bin/env python3
from __future__ import annotations
import asyncio
import cProfile
import logging
import os
import shutil
import tempfile
import time
from contextlib import contextmanager
from pathlib import Path
from typing import Callable, Iterator, List, Optional
import aiosqlite
import click
import zstd
import flax.server.ws_connection as ws
from flax.cmds.init_funcs import flax_init
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.full_node.full_node import FullNode
from flax.protocols import full_node_protocol
from flax.server.outbound_message import Message, NodeType
from flax.simulator.block_tools import make_unfinished_block
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.full_block import FullBlock
from flax.types.peer_info import PeerInfo
from flax.util.config import load_config
from flax.util.ints import uint16
from tools.test_constants import test_constants as TEST_CONSTANTS
class ExitOnError(logging.Handler):
def __init__(self):
super().__init__()
self.exit_with_failure = False
def emit(self, record):
if record.levelno != logging.ERROR:
return
self.exit_with_failure = True
@contextmanager
def enable_profiler(profile: bool, counter: int) -> Iterator[None]:
if not profile:
yield
return
with cProfile.Profile() as pr:
receive_start_time = time.monotonic()
yield
if time.monotonic() - receive_start_time > 5:
pr.create_stats()
pr.dump_stats(f"slow-batch-{counter:05d}.profile")
class FakeServer:
async def send_to_all(self, messages: List[Message], node_type: NodeType):
pass
async def send_to_all_except(self, messages: List[Message], node_type: NodeType, exclude: bytes32):
pass
def set_received_message_callback(self, callback: Callable):
pass
async def get_peer_info(self) -> Optional[PeerInfo]:
return None
def get_full_node_outgoing_connections(self) -> List[ws.WSFlaxConnection]:
return []
def is_duplicate_or_self_connection(self, target_node: PeerInfo) -> bool:
return False
async def start_client(
self,
target_node: PeerInfo,
on_connect: Callable = None,
auth: bool = False,
is_feeler: bool = False,
) -> bool:
return False
class FakePeer:
def get_peer_logging(self) -> PeerInfo:
return PeerInfo("0.0.0.0", uint16(0))
def __init__(self):
self.peer_node_id = bytes([0] * 32)
async def get_peer_info(self) -> Optional[PeerInfo]:
return None
async def run_sync_test(
file: Path,
db_version,
profile: bool,
single_thread: bool,
test_constants: bool,
keep_up: bool,
db_sync: str,
node_profiler: bool,
start_at_checkpoint: Optional[str],
) -> None:
logger = logging.getLogger()
logger.setLevel(logging.WARNING)
handler = logging.FileHandler("test-full-sync.log")
handler.setFormatter(
logging.Formatter(
"%(levelname)-8s %(message)s",
datefmt="%Y-%m-%dT%H:%M:%S",
)
)
logger.addHandler(handler)
check_log = ExitOnError()
logger.addHandler(check_log)
with tempfile.TemporaryDirectory() as root_dir:
root_path = Path(root_dir)
if start_at_checkpoint is not None:
shutil.copytree(Path(start_at_checkpoint) / ".", root_path, dirs_exist_ok=True)
flax_init(root_path, should_check_keys=False, v1_db=(db_version == 1))
config = load_config(root_path, "config.yaml")
if test_constants:
constants = TEST_CONSTANTS
else:
overrides = config["network_overrides"]["constants"][config["selected_network"]]
constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**overrides)
if single_thread:
config["full_node"]["single_threaded"] = True
config["full_node"]["db_sync"] = db_sync
config["full_node"]["enable_profiler"] = node_profiler
full_node = FullNode(
config["full_node"],
root_path=root_path,
consensus_constants=constants,
)
try:
full_node.set_server(FakeServer()) # type: ignore[arg-type]
await full_node._start()
peak = full_node.blockchain.get_peak()
if peak is not None:
height = int(peak.height)
else:
height = 0
peer: ws.WSFlaxConnection = FakePeer() # type: ignore[assignment]
print()
counter = 0
monotonic = height
prev_hash = None
async with aiosqlite.connect(file) as in_db:
await in_db.execute("pragma query_only")
rows = await in_db.execute(
"SELECT header_hash, height, block FROM full_blocks "
"WHERE height >= ? AND in_main_chain=1 ORDER BY height",
(height,),
)
block_batch = []
start_time = time.monotonic()
logger.warning(f"starting test {start_time}")
worst_batch_height = None
worst_batch_time_per_block = None
async for r in rows:
batch_start_time = time.monotonic()
with enable_profiler(profile, height):
block = FullBlock.from_bytes(zstd.decompress(r[2]))
block_batch.append(block)
assert block.height == monotonic
monotonic += 1
assert prev_hash is None or block.prev_header_hash == prev_hash
prev_hash = block.header_hash
if len(block_batch) < 32:
continue
if keep_up:
for b in block_batch:
await full_node.respond_unfinished_block(
full_node_protocol.RespondUnfinishedBlock(make_unfinished_block(b, constants)), peer
)
await full_node.respond_block(full_node_protocol.RespondBlock(b))
else:
success, summary = await full_node.receive_block_batch(block_batch, peer, None)
end_height = block_batch[-1].height
full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE)
if not success:
raise RuntimeError("failed to ingest block batch")
assert summary is not None
time_per_block = (time.monotonic() - batch_start_time) / len(block_batch)
if not worst_batch_height or worst_batch_time_per_block > time_per_block:
worst_batch_height = height
worst_batch_time_per_block = time_per_block
counter += len(block_batch)
height += len(block_batch)
print(
f"\rheight {height} {time_per_block:0.2f} s/block ",
end="",
)
block_batch = []
if check_log.exit_with_failure:
raise RuntimeError("error printed to log. exiting")
if counter >= 100000:
counter = 0
print()
end_time = time.monotonic()
logger.warning(f"test completed at {end_time}")
logger.warning(f"duration: {end_time - start_time:0.2f} s")
logger.warning(f"worst time-per-block: {worst_batch_time_per_block:0.2f} s")
logger.warning(f"worst height: {worst_batch_height}")
logger.warning(f"end-height: {height}")
if node_profiler:
(root_path / "profile-node").rename("./profile-node")
finally:
print("closing full node")
full_node._close()
await full_node._await_closed()
@click.group()
def main() -> None:
pass
@main.command("run", short_help="run simulated full sync from an existing blockchain db")
@click.argument("file", type=click.Path(), required=True)
@click.option("--db-version", type=int, required=False, default=2, help="the DB version to use in simulated node")
@click.option("--profile", is_flag=True, required=False, default=False, help="dump CPU profiles for slow batches")
@click.option("--db-sync", type=str, required=False, default="off", help="sqlite sync mode. One of: off, normal, full")
@click.option("--node-profiler", is_flag=True, required=False, default=False, help="enable the built-in node-profiler")
@click.option(
"--test-constants",
is_flag=True,
required=False,
default=False,
help="expect the blockchain database to be blocks using the test constants",
)
@click.option(
"--single-thread",
is_flag=True,
required=False,
default=False,
help="run node in a single process, to include validation in profiles",
)
@click.option(
"--keep-up",
is_flag=True,
required=False,
default=False,
help="pass blocks to the full node as if we're staying synced, rather than syncing",
)
@click.option(
"--start-at-checkpoint",
type=click.Path(),
required=False,
default=None,
help="start test from this specified checkpoint state",
)
def run(
file: Path,
db_version: int,
profile: bool,
single_thread: bool,
test_constants: bool,
keep_up: bool,
db_sync: str,
node_profiler: bool,
start_at_checkpoint: Optional[str],
) -> None:
"""
The FILE parameter should point to an existing blockchain database file (in v2 format)
"""
print(f"PID: {os.getpid()}")
asyncio.run(
run_sync_test(
Path(file),
db_version,
profile,
single_thread,
test_constants,
keep_up,
db_sync,
node_profiler,
start_at_checkpoint,
)
)
@main.command("analyze", short_help="generate call stacks for all profiles dumped to current directory")
def analyze() -> None:
from glob import glob
from shlex import quote
from subprocess import check_call
for input_file in glob("slow-batch-*.profile"):
output = input_file.replace(".profile", ".png")
print(f"{input_file}")
check_call(f"gprof2dot -f pstats {quote(input_file)} | dot -T png >{quote(output)}", shell=True)
@main.command("create-checkpoint", short_help="sync the full node up to specified height and save its state")
@click.argument("file", type=click.Path(), required=True)
@click.argument("out-file", type=click.Path(), required=True)
@click.option("--height", type=int, required=True, help="Sync node up to this height")
def create_checkpoint(file: Path, out_file: Path, height: int) -> None:
"""
The FILE parameter should point to an existing blockchain database file (in v2 format)
"""
asyncio.run(run_sync_checkpoint(Path(file), Path(out_file), height))
async def run_sync_checkpoint(
file: Path,
root_path: Path,
max_height: int,
) -> None:
root_path.mkdir(parents=True, exist_ok=True)
flax_init(root_path, should_check_keys=False, v1_db=False)
config = load_config(root_path, "config.yaml")
overrides = config["network_overrides"]["constants"][config["selected_network"]]
constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**overrides)
config["full_node"]["db_sync"] = "off"
full_node = FullNode(
config["full_node"],
root_path=root_path,
consensus_constants=constants,
)
try:
full_node.set_server(FakeServer()) # type: ignore[arg-type]
await full_node._start()
peer: ws.WSFlaxConnection = FakePeer() # type: ignore[assignment]
print()
height = 0
async with aiosqlite.connect(file) as in_db:
await in_db.execute("pragma query_only")
rows = await in_db.execute(
"SELECT block FROM full_blocks WHERE in_main_chain=1 AND height < ? ORDER BY height", (max_height,)
)
block_batch = []
async for r in rows:
block = FullBlock.from_bytes(zstd.decompress(r[0]))
block_batch.append(block)
if len(block_batch) < 32:
continue
success, _ = await full_node.receive_block_batch(block_batch, peer, None)
end_height = block_batch[-1].height
full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE)
if not success:
raise RuntimeError("failed to ingest block batch")
height += len(block_batch)
print(f"\rheight {height} ", end="")
block_batch = []
if len(block_batch) > 0:
success, _ = await full_node.receive_block_batch(block_batch, peer, None)
if not success:
raise RuntimeError("failed to ingest block batch")
finally:
print("closing full node")
full_node._close()
await full_node._await_closed()
main.add_command(run)
main.add_command(analyze)
if __name__ == "__main__":
# pylint: disable = no-value-for-parameter
main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/__init__.py | tools/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/analyze-chain.py | tools/analyze-chain.py | #!/usr/bin/env python3
import sqlite3
import sys
import zstd
import click
from pathlib import Path
from typing import Callable, Optional
from time import time
from chia_rs import run_generator, MEMPOOL_MODE
from flax.types.blockchain_format.program import Program
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.wallet.puzzles.rom_bootstrap_generator import get_generator
from flax.util.full_block_utils import block_info_from_block, generator_from_block
GENERATOR_ROM = bytes(get_generator())
# returns an optional error code and an optional PySpendBundleConditions (from chia_rs)
# exactly one of those will hold a value and the number of seconds it took to
# run
def run_gen(env_data: bytes, block_program_args: bytes, flags: int):
max_cost = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM
cost_per_byte = DEFAULT_CONSTANTS.COST_PER_BYTE
# we don't charge for the size of the generator ROM. However, we do charge
# cost for the operations it executes
max_cost -= len(env_data) * cost_per_byte
env_data = b"\xff" + env_data + b"\xff" + block_program_args + b"\x80"
try:
start_time = time()
err, result = run_generator(
GENERATOR_ROM,
env_data,
max_cost,
flags,
)
run_time = time() - start_time
return err, result, run_time
except Exception as e:
# GENERATOR_RUNTIME_ERROR
sys.stderr.write(f"Exception: {e}\n")
return 117, None, 0
def callable_for_module_function_path(call: str) -> Callable:
module_name, function_name = call.split(":", 1)
module = __import__(module_name, fromlist=[function_name])
return getattr(module, function_name)
@click.command()
@click.argument("file", type=click.Path(), required=True)
@click.option(
"--mempool-mode", default=False, is_flag=True, help="execute all block generators in the strict mempool mode"
)
@click.option("--start", default=225000, help="first block to examine")
@click.option("--end", default=None, help="last block to examine")
@click.option("--call", default=None, help="function to pass block iterator to in form `module:function`")
def main(file: Path, mempool_mode: bool, start: int, end: Optional[int], call: Optional[str]):
if call is None:
call_f = default_call
else:
call_f = callable_for_module_function_path(call)
c = sqlite3.connect(file)
end_limit_sql = "" if end is None else f"and height <= {end} "
rows = c.execute(
f"SELECT header_hash, height, block FROM full_blocks "
f"WHERE height >= {start} {end_limit_sql} and in_main_chain=1 ORDER BY height"
)
for r in rows:
hh: bytes = r[0]
height: int = r[1]
block = block_info_from_block(zstd.decompress(r[2]))
if block.transactions_generator is None:
sys.stderr.write(f" no-generator. block {height}\r")
continue
start_time = time()
generator_blobs = []
for h in block.transactions_generator_ref_list:
ref = c.execute("SELECT block FROM full_blocks WHERE height=? and in_main_chain=1", (h,))
generator = generator_from_block(zstd.decompress(ref.fetchone()[0]))
assert generator is not None
generator_blobs.append(bytes(generator))
ref.close()
ref_lookup_time = time() - start_time
if mempool_mode:
flags = MEMPOOL_MODE
else:
flags = 0
call_f(block, hh, height, generator_blobs, ref_lookup_time, flags)
def default_call(block, hh, height, generator_blobs, ref_lookup_time, flags):
    """Run one block generator and print a tab-separated stats line to stdout."""
    ref_count = len(generator_blobs)

    # Serialize the referenced generators as a quoted CLVM list argument.
    env = bytearray(b"\xff")
    for blob in generator_blobs:
        env += b"\xff" + Program.to(blob).as_bin()
    env += b"\x80\x80"

    err, result, run_time = run_gen(bytes(block.transactions_generator), bytes(env), flags)
    if err is not None:
        sys.stderr.write(f"ERROR: {hh.hex()} {height} {err}\n")
        return

    removal_count = len(result.spends)
    addition_count = sum(len(spend.create_coin) for spend in result.spends)
    print(
        f"{hh.hex()}\t{height:7d}\t{result.cost:11d}\t{run_time:0.3f}\t{ref_count}\t{ref_lookup_time:0.3f}\t"
        f"{result.reserve_fee:14}\t"
        f"{len(bytes(block.transactions_generator)):6d}\t"
        f"{removal_count:4d}\t{addition_count:4d}"
    )
if __name__ == "__main__":
    # pylint: disable = no-value-for-parameter
    # click parses sys.argv and supplies the parameters
    main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tools/run_block.py | tools/run_block.py | #!/usr/bin/env python
"""
run_block: Convert an encoded FullBlock from the Flax blockchain into a list of transactions
As input, takes a file containing a [FullBlock](../flax/types/full_block.py) in json format
```
curl --insecure --cert $config_root/config/ssl/full_node/private_full_node.crt \
--key $config_root/config/ssl/full_node/private_full_node.key \
-d '{ "header_hash": "'$hash'" }' -H "Content-Type: application/json" \
-X POST https://localhost:$port/get_block
$ca_root is the directory containing your current Flax config files
$hash is the header_hash of the [BlockRecord](../flax/consensus/block_record.py)
$port is the Full Node RPC API port
```
The `transactions_generator` and `transactions_generator_ref_list` fields of a `FullBlock`
contain the information necessary to produce transaction record details.
`transactions_generator` is CLVM bytecode
`transactions_generator_ref_list` is a list of block heights as `uint32`
When this CLVM code is run in the correct environment, it produces information that can
then be verified by the consensus rules, or used to view some aspects of transaction history.
The information for each spend is an "NPC" (Name, Puzzle, Condition):
"coin_name": a unique 32 byte identifier
"conditions": a list of condition expressions, as in [condition_opcodes.py](../flax/types/condition_opcodes.py)
"puzzle_hash": the sha256 of the CLVM bytecode that controls spending this coin
Condition Opcodes, such as AGG_SIG_ME, or CREATE_COIN are created by running the "puzzle", i.e. the CLVM bytecode
associated with the coin being spent. Condition Opcodes are verified by every client on the network for every spend,
and in this way they control whether a spend is valid or not.
"""
from __future__ import annotations
import json
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Tuple
import click
from clvm.casts import int_from_bytes
from flax.consensus.constants import ConsensusConstants
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import SerializedProgram
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.condition_with_args import ConditionWithArgs
from flax.types.generator_types import BlockGenerator
from flax.util.config import load_config
from flax.util.default_root import DEFAULT_ROOT_PATH
from flax.util.ints import uint32, uint64
from flax.wallet.cat_wallet.cat_utils import match_cat_puzzle
from flax.wallet.puzzles.load_clvm import load_serialized_clvm_maybe_recompile
from flax.wallet.uncurried_puzzle import uncurry_puzzle
DESERIALIZE_MOD = load_serialized_clvm_maybe_recompile(
"flaxlisp_deserialisation.clvm", package_or_requirement="flax.wallet.puzzles"
)
@dataclass
class NPC:
    # (Name, Puzzle, Condition) summary of one coin spend.
    coin_name: bytes32  # unique 32-byte coin id
    puzzle_hash: bytes32  # tree hash of the CLVM puzzle controlling the coin
    conditions: List[Tuple[ConditionOpcode, List[ConditionWithArgs]]]  # conditions grouped by opcode
@dataclass
class CAT:
    # One CAT spend discovered in a block's generator output.
    asset_id: str  # hex-encoded asset id (leading "0x"-style prefix stripped by the producer)
    memo: str  # utf-8 memo from the retirement CREATE_COIN, "" when absent
    npc: NPC
    def cat_to_dict(self):
        """Return a JSON-serializable dict form of this CAT record."""
        return {"asset_id": self.asset_id, "memo": self.memo, "npc": npc_to_dict(self.npc)}
def condition_with_args_to_dict(condition_with_args: ConditionWithArgs):
    """Render one condition as a dict of opcode name and hex-encoded arguments."""
    return {
        "condition_opcode": condition_with_args.opcode.name,
        "arguments": [v.hex() for v in condition_with_args.vars],
    }
def condition_list_to_dict(condition_list: Tuple[ConditionOpcode, List[ConditionWithArgs]]):
    """Render the conditions of one (opcode, conditions) group; all must share the opcode."""
    opcode, cwas = condition_list
    assert all(cwa.opcode == opcode for cwa in cwas)
    return [condition_with_args_to_dict(cwa) for cwa in cwas]
def npc_to_dict(npc: NPC):
    """Render an NPC as a JSON-serializable dict."""
    rendered_conditions = [
        {"condition_type": opcode.name, "conditions": condition_list_to_dict((opcode, cwas))}
        for opcode, cwas in npc.conditions
    ]
    return {
        "coin_name": npc.coin_name.hex(),
        "conditions": rendered_conditions,
        "puzzle_hash": npc.puzzle_hash.hex(),
    }
def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants, max_cost: int) -> List[CAT]:
    """Execute a block generator and return the CAT spends it contains.

    Runs the generator program against the deserializer and its back
    references, then inspects each coin spend: non-CAT puzzles are skipped;
    for CAT puzzles the puzzle is run to collect its conditions, and a memo
    is extracted from a CREATE_COIN to the all-zero "retirement" address.
    """
    block_args = [bytes(a) for a in block_generator.generator_refs]
    cost, block_result = block_generator.program.run_with_cost(max_cost, DESERIALIZE_MOD, block_args)
    coin_spends = block_result.first()
    cat_list: List[CAT] = []
    for spend in coin_spends.as_iter():
        parent, puzzle, amount, solution = spend.as_iter()
        args = match_cat_puzzle(uncurry_puzzle(puzzle))
        if args is None:
            # not a CAT puzzle - ignore this spend
            continue
        _, asset_id, _ = args
        memo = ""
        puzzle_result = puzzle.run(solution)
        # conditions grouped by opcode, in encounter order
        conds: Dict[ConditionOpcode, List[ConditionWithArgs]] = {}
        for condition in puzzle_result.as_python():
            op = ConditionOpcode(condition[0])
            if op not in conds:
                conds[op] = []
            if condition[0] != ConditionOpcode.CREATE_COIN or len(condition) < 4:
                conds[op].append(ConditionWithArgs(op, [i for i in condition[1:3]]))
                continue
            # If only 3 elements (opcode + 2 args), there is no memo, this is ph, amount
            if type(condition[3]) != list:
                # If it's not a list, it's not the correct format
                conds[op].append(ConditionWithArgs(op, [i for i in condition[1:3]]))
                continue
            conds[op].append(ConditionWithArgs(op, [i for i in condition[1:3]] + [condition[3][0]]))
            # special retirement address
            if condition[3][0].hex() != "0000000000000000000000000000000000000000000000000000000000000000":
                continue
            if len(condition[3]) >= 2:
                try:
                    memo = condition[3][1].decode("utf-8", errors="strict")
                except UnicodeError:
                    pass  # ignore this error which should leave memo as empty string
            # technically there could be more such create_coin ops in the list but our wallet does not
            # so leaving it for the future
            break
        puzzle_hash = puzzle.get_tree_hash()
        coin = Coin(parent.atom, puzzle_hash, int_from_bytes(amount.atom))
        cat_list.append(
            CAT(
                asset_id=bytes(asset_id).hex()[2:],
                memo=memo,
                npc=NPC(coin.name(), puzzle_hash, [(op, cond) for op, cond in conds.items()]),
            )
        )
    return cat_list
def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[SerializedProgram]:
    """Load the generator program of each referenced height from <root_path>/<height>.json."""
    programs = []
    for block_height in ref_list:
        ref_file = root_path / f"{block_height}.json"
        with open(ref_file, "rb") as fh:
            generator_hex = json.load(fh)["block"]["transactions_generator"]
        programs.append(SerializedProgram.fromhex(generator_hex))
    return programs
def run_generator_with_args(
    generator_program_hex: str,
    generator_args: List[SerializedProgram],
    constants: ConsensusConstants,
    cost: uint64,
) -> List[CAT]:
    """Deserialize a generator program and run it, returning the CAT spends.

    Returns [] for an empty hex string (a block with no generator).  The run
    cost is capped at the consensus MAX_BLOCK_COST_CLVM.
    """
    if not generator_program_hex:
        return []
    generator_program = SerializedProgram.fromhex(generator_program_hex)
    block_generator = BlockGenerator(generator_program, generator_args, [])
    return run_generator(block_generator, constants, min(constants.MAX_BLOCK_COST_CLVM, cost))
@click.command()
@click.argument("filename", type=click.Path(exists=True), default="testnet10.396963.json")
def cmd_run_json_block_file(filename):
    """`file` is a file containing a FullBlock in JSON format"""
    # Thin CLI wrapper; the work happens in run_json_block_file().
    return run_json_block_file(Path(filename))
def run_json_block(full_block, parent: Path, constants: ConsensusConstants) -> List[CAT]:
    """Extract the generator from a JSON FullBlock dict and run it, returning its CATs.

    ``parent`` is the directory holding the referenced blocks' JSON files.
    Returns [] when the block has no transactions_info or no generator.
    """
    block = full_block["block"]
    generator_program_hex: str = block["transactions_generator"]
    tx_info: dict = block["transactions_info"]
    if not (tx_info and generator_program_hex):
        return []
    generator_args = ref_list_to_args(block["transactions_generator_ref_list"], parent)
    return run_generator_with_args(generator_program_hex, generator_args, constants, tx_info["cost"])
def run_json_block_file(filename: Path):
    """Load a FullBlock JSON file, run its generator, and print the CAT list as JSON."""
    # Use a context manager so the file handle is closed deterministically
    # (the previous version relied on the garbage collector to close it).
    with filename.open("rb") as f:
        full_block = json.load(f)
    # pull in current constants from config.yaml
    _, constants = get_config_and_constants()
    cat_list = run_json_block(full_block, filename.parent.absolute(), constants)
    cat_list_json = json.dumps([cat.cat_to_dict() for cat in cat_list])
    print(cat_list_json)
def get_config_and_constants():
    """Load config.yaml from the default root and apply the selected network's constant overrides."""
    config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
    network = config["selected_network"]
    overrides = config["network_overrides"]["constants"][network]
    updated_constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**overrides)
    return config, updated_constants
if __name__ == "__main__":
    # click parses sys.argv and supplies the parameters
    cmd_run_json_block_file()  # pylint: disable=no-value-for-parameter
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/testconfig.py | tests/testconfig.py | from __future__ import annotations
from typing import TYPE_CHECKING, List, Union
if TYPE_CHECKING:
    from typing_extensions import Literal
    Oses = Literal["macos", "ubuntu", "windows"]
# Defaults are conservative.
# Per-directory config.py modules may override these values
# (merged by tests/build-job-matrix.py's update_config()).
parallel: Union[bool, int, Literal["auto"]] = False  # pytest worker count; False = serial
checkout_blocks_and_plots = False  # fetch the test blocks/plots before running
install_timelord = False  # install the timelord for this job
check_resource_usage = False  # enable the pytest-monitor plugin
job_timeout = 30  # CI job timeout — assumed minutes, TODO confirm
custom_vars: List[str] = []
os_skip: List[Oses] = []  # operating systems on which to skip the job
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/check_sql_statements.py | tests/check_sql_statements.py | #!/usr/bin/env python3
from __future__ import annotations
import sys
from subprocess import check_output
from typing import Dict, Set, Tuple
# check for duplicate index names
def check_create(sql_type: str, cwd: str, exemptions: Set[Tuple[str, str]] = frozenset()) -> int:
    """Grep the tree under ``cwd`` for ``CREATE <sql_type>`` statements and count duplicate names.

    Prints every duplicate found and returns the number of duplicates so the
    caller can aggregate them into an exit status.  ``exemptions`` holds
    (filename, name) pairs that are known, intentional duplicates.
    """
    # NOTE: the default is frozenset() rather than set() — a shared mutable
    # default could leak state between calls.
    lines = check_output(["git", "grep", f"CREATE {sql_type}"], cwd=cwd).decode("ascii").split("\n")
    ret = 0
    items: Dict[str, str] = {}
    for line in lines:
        if f"CREATE {sql_type}" not in line:
            continue
        # test code and the DB-upgrade helper intentionally repeat names
        if line.startswith("tests/"):
            continue
        if "db_upgrade_func.py" in line:
            continue
        name = line.split(f"CREATE {sql_type}")[1]
        if name.startswith(" IF NOT EXISTS"):
            name = name[14:]  # len(" IF NOT EXISTS") == 14
        name = name.strip()
        name = name.split()[0]
        name = name.split("(")[0]
        if name in items:
            # these appear as a duplicates, but one is for v1 and the other for v2
            if (line.split()[0][:-1], name) not in exemptions:
                print(f'duplicate {sql_type} "{name}"\n  {items[name]}\n  {line}')
                ret += 1
        items[name] = line
    return ret
ret = 0
# wallet and full_node keep separate schemas; scan each tree independently
ret += check_create("INDEX", "flax/wallet")
ret += check_create(
    "INDEX",
    "flax/full_node",
    set(
        [
            ("block_store.py", "is_fully_compactified"),
            ("block_store.py", "height"),
        ]
    ),
)
ret += check_create("TABLE", "flax/wallet")
ret += check_create(
    "TABLE",
    "flax/full_node",
    set(
        [
            ("block_store.py", "sub_epoch_segments_v3"),
            ("block_store.py", "full_blocks"),
            ("coin_store.py", "coin_record"),
            ("hint_store.py", "hints"),
        ]
    ),
)
# non-zero exit status when any unexempted duplicate was found
sys.exit(ret)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/connection_utils.py | tests/connection_utils.py | from __future__ import annotations
import asyncio
import logging
from typing import Tuple
import aiohttp
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from flax.protocols.shared_protocol import capabilities, protocol_version
from flax.server.outbound_message import NodeType
from flax.server.server import FlaxServer, ssl_context_for_client
from flax.server.ssl_context import flax_ssl_ca_paths
from flax.server.ws_connection import WSFlaxConnection
from flax.simulator.time_out_assert import time_out_assert
from flax.ssl.create_ssl import generate_ca_signed_cert
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.config import load_config
from flax.util.ints import uint16
log = logging.getLogger(__name__)
async def disconnect_all(server: FlaxServer) -> None:
    """Close every connection currently held by ``server``."""
    # snapshot the values so closing connections can't mutate the dict mid-iteration
    for connection in list(server.all_connections.values()):
        await connection.close()
async def disconnect_all_and_reconnect(server: FlaxServer, reconnect_to: FlaxServer, self_hostname: str) -> bool:
    """Drop all of ``server``'s connections, then dial ``reconnect_to``; True on success."""
    await disconnect_all(server)
    return await server.start_client(PeerInfo(self_hostname, uint16(reconnect_to._port)), None)
async def add_dummy_connection(
    server: FlaxServer, self_hostname: str, dummy_port: int, type: NodeType = NodeType.FULL_NODE
) -> Tuple[asyncio.Queue, bytes32]:
    """Open a fake peer websocket connection to ``server`` for tests.

    Generates a throwaway CA-signed certificate under the server's root path,
    connects over wss and performs the protocol handshake.  Returns the queue
    that receives incoming messages and the dummy peer's node id.
    """
    timeout = aiohttp.ClientTimeout(total=10)
    session = aiohttp.ClientSession(timeout=timeout)
    incoming_queue: asyncio.Queue = asyncio.Queue()
    config = load_config(server.root_path, "config.yaml")
    flax_ca_crt_path, flax_ca_key_path = flax_ssl_ca_paths(server.root_path, config)
    dummy_crt_path = server.root_path / "dummy.crt"
    dummy_key_path = server.root_path / "dummy.key"
    generate_ca_signed_cert(
        flax_ca_crt_path.read_bytes(), flax_ca_key_path.read_bytes(), dummy_crt_path, dummy_key_path
    )
    ssl_context = ssl_context_for_client(flax_ca_crt_path, flax_ca_key_path, dummy_crt_path, dummy_key_path)
    pem_cert = x509.load_pem_x509_certificate(dummy_crt_path.read_bytes(), default_backend())
    der_cert = x509.load_der_x509_certificate(pem_cert.public_bytes(serialization.Encoding.DER), default_backend())
    # the node id is the SHA-256 fingerprint of the DER-encoded certificate
    peer_id = bytes32(der_cert.fingerprint(hashes.SHA256()))
    url = f"wss://{self_hostname}:{server._port}/ws"
    ws = await session.ws_connect(url, autoclose=True, autoping=True, ssl=ssl_context)
    wsc = WSFlaxConnection(
        type,
        ws,
        server._port,
        log,
        True,
        False,
        self_hostname,
        incoming_queue,
        lambda x, y: x,
        peer_id,
        100,
        30,
        local_capabilities_for_handshake=capabilities,
    )
    await wsc.perform_handshake(server._network_id, protocol_version, dummy_port, NodeType.FULL_NODE)
    return incoming_queue, peer_id
async def connect_and_get_peer(server_1: FlaxServer, server_2: FlaxServer, self_hostname: str) -> WSFlaxConnection:
    """Connect server_2 to server_1 and return server_1's connection object for server_2."""
    await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)))

    async def connected():
        # all_connections is keyed by peer node id
        return server_2.node_id in server_1.all_connections

    await time_out_assert(10, connected, True)
    peer_connection = server_1.all_connections.get(server_2.node_id)
    assert peer_connection is not None
    return peer_connection
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/build-job-matrix.py | tests/build-job-matrix.py | from __future__ import annotations
import argparse
import json
import logging
import types
from pathlib import Path
from typing import Any, Dict, List
import testconfig
root_path = Path(__file__).parent.resolve()
project_root_path = root_path.parent
def skip(path: Path) -> bool:
    """Return True when any component of ``path`` starts with '_' or '.'."""
    for part in path.parts:
        if part.startswith("_") or part.startswith("."):
            return True
    return False
def subdirs(per: str) -> List[Path]:
    """Collect test paths under ``root_path``, one entry per directory or per file.

    ``per`` must be "directory" or "file"; anything else raises.  Paths with a
    hidden or underscore-prefixed component are excluded via skip().  In
    directory mode, directories without directly-contained test_*.py files
    are dropped.
    """
    dirs: List[Path]
    if per == "directory":
        glob_pattern = "**/"
    elif per == "file":
        glob_pattern = "**/test_*.py"
    else:
        raise Exception(f"Unrecognized per: {per!r}")
    paths = [path for path in root_path.rglob(glob_pattern) if not skip(path=path)]
    if per == "directory":
        filtered_paths = []
        for path in paths:
            relative_path = path.relative_to(root_path)
            logging.info(f"Considering: {relative_path}")
            if len([f for f in path.glob("test_*.py")]) == 0:
                # no tests directly in this directory -> no CI job for it
                logging.info(f"Skipping {relative_path}: no tests collected")
                continue
            filtered_paths.append(path)
        paths = filtered_paths
    return sorted(paths)
def module_dict(module: types.ModuleType) -> Dict[str, Any]:
    """Return the module's public attributes (names not starting with '_') as a dict."""
    public_items = {}
    for attr_name, attr_value in module.__dict__.items():
        if not attr_name.startswith("_"):
            public_items[attr_name] = attr_value
    return public_items
def dir_config(dir: Path) -> Dict[str, Any]:
    """Load the per-directory ``config.py`` overrides for ``dir``; {} when it has none."""
    import importlib
    # the directory's config module, addressed relative to the tests root
    module_name = ".".join([*dir.relative_to(root_path).parts, "config"])
    try:
        return module_dict(importlib.import_module(module_name))
    except ModuleNotFoundError:
        return {}
# Overwrite with directory specific values
def update_config(parent: Dict[str, Any], child: Dict[str, Any]) -> Dict[str, Any]:
    """Merge two config dicts, with ``child`` values overriding ``parent``.

    Returns a new dict so that neither argument is mutated (the previous
    version filled missing keys into ``child`` in place).  A ``None`` child
    yields ``parent`` unchanged.
    """
    if child is None:
        return parent
    return {**parent, **child}
# args
arg_parser = argparse.ArgumentParser(description="Generate GitHub test matrix configuration")
arg_parser.add_argument("--per", type=str, choices=["directory", "file"], required=True)
arg_parser.add_argument("--verbose", "-v", action="store_true")
arg_parser.add_argument("--only", action="append", default=[])
arg_parser.add_argument("--duplicates", type=int, default=1)
args = arg_parser.parse_args()
if args.verbose:
    logging.basicConfig(format="%(asctime)s:%(message)s", level=logging.DEBUG)
# main
if len(args.only) == 0:
    test_paths = subdirs(per=args.per)
else:
    test_paths = [root_path.joinpath(path) for path in args.only]
# repeat each path --duplicates times (e.g. to run a job multiple times)
test_paths = [path for path in test_paths for _ in range(args.duplicates)]
configuration = []
for path in test_paths:
    if path.is_dir():
        test_files = sorted(path.glob("test_*.py"))
        test_file_paths = [file.relative_to(project_root_path) for file in test_files]
        paths_for_cli = " ".join(path.as_posix() for path in test_file_paths)
    else:
        paths_for_cli = path.relative_to(project_root_path).as_posix()
    # testconfig defaults, overridden by the directory's own config.py
    conf = update_config(module_dict(testconfig), dir_config(path))
    # TODO: design a configurable system for this
    process_count = {
        "macos": {False: 0, True: 4}.get(conf["parallel"], conf["parallel"]),
        "ubuntu": {False: 0, True: 4}.get(conf["parallel"], conf["parallel"]),
        "windows": {False: 0, True: 2}.get(conf["parallel"], conf["parallel"]),
    }
    pytest_parallel_args = {os: f" -n {count}" for os, count in process_count.items()}
    for_matrix = {
        "check_resource_usage": conf["check_resource_usage"],
        "enable_pytest_monitor": "-p monitor" if conf["check_resource_usage"] else "",
        "job_timeout": conf["job_timeout"],
        "pytest_parallel_args": pytest_parallel_args,
        "checkout_blocks_and_plots": conf["checkout_blocks_and_plots"],
        "install_timelord": conf["install_timelord"],
        "test_files": paths_for_cli,
        "name": ".".join(path.relative_to(root_path).with_suffix("").parts),
    }
    for_matrix = dict(sorted(for_matrix.items()))
    configuration.append(for_matrix)
configuration_json = json.dumps(configuration)
# log the pretty version; print the compact JSON for the CI workflow to consume
for line in json.dumps(configuration, indent=4).splitlines():
    logging.info(line)
print(f"{configuration_json}")
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/setup_services.py | tests/setup_services.py | from __future__ import annotations
import asyncio
import logging
import signal
import sqlite3
from pathlib import Path
from secrets import token_bytes
from typing import AsyncGenerator, List, Optional, Tuple
from flax.cmds.init_funcs import init
from flax.consensus.constants import ConsensusConstants
from flax.daemon.server import WebSocketServer, daemon_launch_lock_path
from flax.protocols.shared_protocol import Capability, capabilities
from flax.server.start_farmer import create_farmer_service
from flax.server.start_full_node import create_full_node_service
from flax.server.start_harvester import create_harvester_service
from flax.server.start_introducer import create_introducer_service
from flax.server.start_timelord import create_timelord_service
from flax.server.start_wallet import create_wallet_service
from flax.simulator.block_tools import BlockTools
from flax.simulator.start_simulator import create_full_node_simulator_service
from flax.timelord.timelord_launcher import kill_processes, spawn_process
from flax.types.peer_info import PeerInfo
from flax.util.bech32m import encode_puzzle_hash
from flax.util.config import lock_and_load_config, save_config
from flax.util.ints import uint16
from flax.util.keychain import bytes_to_mnemonic
from flax.util.lock import Lockfile
from tests.util.keyring import TempKeyring
log = logging.getLogger(__name__)
def get_capabilities(disable_capabilities_values: Optional[List[Capability]]) -> List[Tuple[uint16, str]]:
    """Return the node capability list with the given capabilities disabled.

    A disabled capability is reported with value "0" rather than removed.
    ``Capability.BASE`` can never be disabled.  On any error the full default
    capability list is returned.  The caller's list is never modified (the
    previous version removed BASE from it in place).
    """
    if disable_capabilities_values is None:
        return capabilities.copy()
    try:
        # work on a local set so the caller's list is not mutated;
        # BASE capability cannot be removed
        disabled = set(disable_capabilities_values) - {Capability.BASE}
        updated_capabilities = []
        for capability in capabilities:
            if Capability(int(capability[0])) in disabled:
                # "0" means capability is disabled
                updated_capabilities.append((capability[0], "0"))
            else:
                updated_capabilities.append(capability)
        return updated_capabilities
    except Exception:
        logging.getLogger(__name__).exception("Error disabling capabilities, defaulting to all capabilities")
        return capabilities.copy()
async def setup_daemon(btools: BlockTools) -> AsyncGenerator[WebSocketServer, None]:
    """Start a daemon WebSocketServer for tests; yield it, then stop it on teardown."""
    root_path = btools.root_path
    config = btools.config
    assert "daemon_port" in config
    crt_path = root_path / config["daemon_ssl"]["private_crt"]
    key_path = root_path / config["daemon_ssl"]["private_key"]
    ca_crt_path = root_path / config["private_ssl_ca"]["crt"]
    ca_key_path = root_path / config["private_ssl_ca"]["key"]
    # hold the daemon launch lockfile for the server's whole lifetime,
    # mirroring how the real daemon guards against double-starts
    with Lockfile.create(daemon_launch_lock_path(root_path)):
        shutdown_event = asyncio.Event()
        ws_server = WebSocketServer(root_path, ca_crt_path, ca_key_path, crt_path, key_path, shutdown_event)
        await ws_server.start()
        yield ws_server
        await ws_server.stop()
async def setup_full_node(
    consensus_constants: ConsensusConstants,
    db_name: str,
    self_hostname: str,
    local_bt: BlockTools,
    introducer_port=None,
    simulator=False,
    send_uncompact_interval=0,
    sanitize_weight_proof_only=False,
    connect_to_daemon=False,
    db_version=1,
    disable_capabilities: Optional[List[Capability]] = None,
    yield_service: bool = False,
):
    """Start a full node (or simulator) for tests with a fresh database.

    Yields the Service when ``yield_service`` is True, otherwise the node API.
    On teardown the service is stopped and the database file deleted.
    """
    db_path = local_bt.root_path / f"{db_name}"
    if db_path.exists():
        db_path.unlink()
    # v2+ databases carry an explicit version table; v1 has none
    if db_version > 1:
        with sqlite3.connect(db_path) as connection:
            connection.execute("CREATE TABLE database_version(version int)")
            connection.execute("INSERT INTO database_version VALUES (?)", (db_version,))
            connection.commit()
    if connect_to_daemon:
        assert local_bt.config["daemon_port"] is not None
    config = local_bt.config
    service_config = config["full_node"]
    service_config["database_path"] = db_name
    service_config["send_uncompact_interval"] = send_uncompact_interval
    service_config["target_uncompact_proofs"] = 30
    service_config["peer_connect_interval"] = 50
    service_config["sanitize_weight_proof_only"] = sanitize_weight_proof_only
    if introducer_port is not None:
        service_config["introducer_peer"]["host"] = self_hostname
        service_config["introducer_peer"]["port"] = introducer_port
    else:
        service_config["introducer_peer"] = None
    service_config["dns_servers"] = []
    # port 0 lets the OS assign free ports, avoiding collisions between tests
    service_config["port"] = 0
    service_config["rpc_port"] = 0
    config["simulator"]["auto_farm"] = False  # Disable Auto Farm for tests
    config["simulator"]["use_current_time"] = False  # Disable Real timestamps when running tests
    overrides = service_config["network_overrides"]["constants"][service_config["selected_network"]]
    updated_constants = consensus_constants.replace_str_to_bytes(**overrides)
    local_bt.change_config(config)
    override_capabilities = None if disable_capabilities is None else get_capabilities(disable_capabilities)
    if simulator:
        service = create_full_node_simulator_service(
            local_bt.root_path,
            config,
            local_bt,
            connect_to_daemon=connect_to_daemon,
            override_capabilities=override_capabilities,
        )
    else:
        service = create_full_node_service(
            local_bt.root_path,
            config,
            updated_constants,
            connect_to_daemon=connect_to_daemon,
            override_capabilities=override_capabilities,
        )
    await service.start()
    # TODO, just always yield the service only and adjust all other places
    if yield_service:
        yield service
    else:
        yield service._api
    service.stop()
    await service.wait_closed()
    if db_path.exists():
        db_path.unlink()
# Note: convert these setup functions to fixtures, or push it one layer up,
# keeping these usable independently?
async def setup_wallet_node(
    self_hostname: str,
    consensus_constants: ConsensusConstants,
    local_bt: BlockTools,
    spam_filter_after_n_txs=200,
    xfx_spam_amount=1000000,
    full_node_port=None,
    introducer_port=None,
    key_seed=None,
    initial_num_public_keys=5,
    yield_service: bool = False,
):
    """Start a wallet node backed by a temporary keychain and fresh DB.

    Yields the Service when ``yield_service`` is True, otherwise
    (wallet_node, server).  Teardown stops the service, removes the DB file,
    and wipes the temporary keychain.
    """
    with TempKeyring(populate=True) as keychain:
        config = local_bt.config
        service_config = config["wallet"]
        service_config["port"] = 0
        service_config["rpc_port"] = 0
        service_config["initial_num_public_keys"] = initial_num_public_keys
        service_config["spam_filter_after_n_txs"] = spam_filter_after_n_txs
        service_config["xfx_spam_amount"] = xfx_spam_amount
        entropy = token_bytes(32)
        # default to a random key so concurrent wallets don't collide
        if key_seed is None:
            key_seed = entropy
        keychain.add_private_key(bytes_to_mnemonic(key_seed))
        first_pk = keychain.get_first_public_key()
        assert first_pk is not None
        db_path_key_suffix = str(first_pk.get_fingerprint())
        db_name = f"test-wallet-db-{full_node_port}-KEY.sqlite"
        db_path_replaced: str = db_name.replace("KEY", db_path_key_suffix)
        db_path = local_bt.root_path / db_path_replaced
        if db_path.exists():
            db_path.unlink()
        service_config["database_path"] = str(db_name)
        service_config["testing"] = True
        service_config["introducer_peer"]["host"] = self_hostname
        if introducer_port is not None:
            service_config["introducer_peer"]["port"] = introducer_port
            service_config["peer_connect_interval"] = 10
        else:
            service_config["introducer_peer"] = None
        if full_node_port is not None:
            service_config["full_node_peer"] = {}
            service_config["full_node_peer"]["host"] = self_hostname
            service_config["full_node_peer"]["port"] = full_node_port
        else:
            del service_config["full_node_peer"]
        service = create_wallet_service(
            local_bt.root_path,
            config,
            consensus_constants,
            keychain,
            connect_to_daemon=False,
        )
        await service.start()
        # TODO, just always yield the service only and adjust all other places
        if yield_service:
            yield service
        else:
            yield service._node, service._node.server
        service.stop()
        await service.wait_closed()
        if db_path.exists():
            db_path.unlink()
        keychain.delete_all_keys()
async def setup_harvester(
    b_tools: BlockTools,
    root_path: Path,
    farmer_peer: Optional[PeerInfo],
    consensus_constants: ConsensusConstants,
    start_service: bool = True,
):
    """Start a harvester under ``root_path`` using BlockTools' plots and CA.

    Yields the Service (optionally already started); teardown stops it.
    """
    init(None, root_path)
    init(b_tools.root_path / "config" / "ssl" / "ca", root_path)
    with lock_and_load_config(root_path, "config.yaml") as config:
        config["logging"]["log_stdout"] = True
        config["selected_network"] = "testnet0"
        config["harvester"]["selected_network"] = "testnet0"
        config["harvester"]["port"] = 0
        config["harvester"]["rpc_port"] = 0
        # point the harvester at the plots BlockTools created
        config["harvester"]["plot_directories"] = [str(b_tools.plot_dir.resolve())]
        save_config(root_path, "config.yaml", config)
    service = create_harvester_service(
        root_path,
        config,
        consensus_constants,
        farmer_peer=farmer_peer,
        connect_to_daemon=False,
    )
    if start_service:
        await service.start()
    yield service
    service.stop()
    await service.wait_closed()
async def setup_farmer(
    b_tools: BlockTools,
    root_path: Path,
    self_hostname: str,
    consensus_constants: ConsensusConstants,
    full_node_port: Optional[uint16] = None,
    start_service: bool = True,
    port: uint16 = uint16(0),
):
    """Start a farmer under ``root_path`` wired to BlockTools' keys and addresses.

    When ``full_node_port`` is falsy the full-node peer entry is removed so the
    farmer runs without one.  Yields the Service; teardown stops it.
    """
    init(None, root_path)
    init(b_tools.root_path / "config" / "ssl" / "ca", root_path)
    with lock_and_load_config(root_path, "config.yaml") as root_config:
        root_config["logging"]["log_stdout"] = True
        root_config["selected_network"] = "testnet0"
        root_config["farmer"]["selected_network"] = "testnet0"
        save_config(root_path, "config.yaml", root_config)
    service_config = root_config["farmer"]
    config_pool = root_config["pool"]
    # reward addresses come from the BlockTools-generated farmer/pool keys
    service_config["xfx_target_address"] = encode_puzzle_hash(b_tools.farmer_ph, "xfx")
    service_config["pool_public_keys"] = [bytes(pk).hex() for pk in b_tools.pool_pubkeys]
    service_config["port"] = port
    service_config["rpc_port"] = uint16(0)
    config_pool["xfx_target_address"] = encode_puzzle_hash(b_tools.pool_ph, "xfx")
    if full_node_port:
        service_config["full_node_peer"]["host"] = self_hostname
        service_config["full_node_peer"]["port"] = full_node_port
    else:
        del service_config["full_node_peer"]
    service = create_farmer_service(
        root_path,
        root_config,
        config_pool,
        consensus_constants,
        b_tools.local_keychain,
        connect_to_daemon=False,
    )
    if start_service:
        await service.start()
    yield service
    service.stop()
    await service.wait_closed()
async def setup_introducer(bt: BlockTools, port, yield_service: bool = False):
    """Start an introducer advertising ``port``.

    Yields the Service when ``yield_service`` is True, otherwise (api, server).
    Teardown stops the service.
    """
    service = create_introducer_service(
        bt.root_path,
        bt.config,
        advertised_port=port,
        connect_to_daemon=False,
    )
    await service.start()
    if yield_service:
        yield service
    else:
        yield service._api, service._node.server
    service.stop()
    await service.wait_closed()
async def setup_vdf_client(bt: BlockTools, self_hostname: str, port):
    """Spawn a single VDF client process; yield its task, kill it on teardown."""
    lock = asyncio.Lock()
    vdf_task_1 = asyncio.create_task(spawn_process(self_hostname, port, 1, lock, bt.config.get("prefer_ipv6")))
    def stop():
        asyncio.create_task(kill_processes(lock))
    # make sure the subprocess dies if the test run is interrupted
    asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, stop)
    asyncio.get_running_loop().add_signal_handler(signal.SIGINT, stop)
    yield vdf_task_1
    await kill_processes(lock)
async def setup_vdf_clients(bt: BlockTools, self_hostname: str, port):
    """Spawn three VDF client processes; yield their tasks, kill them on teardown."""
    lock = asyncio.Lock()
    prefer_ipv6 = bt.config.get("prefer_ipv6")
    tasks = tuple(
        asyncio.create_task(spawn_process(self_hostname, port, counter, lock, prefer_ipv6)) for counter in (1, 2, 3)
    )

    def stop():
        asyncio.create_task(kill_processes(lock))

    # make sure the subprocesses die if the test run is interrupted
    asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, stop)
    asyncio.get_running_loop().add_signal_handler(signal.SIGINT, stop)
    yield tasks
    await kill_processes(lock)
async def setup_timelord(
    full_node_port,
    sanitizer,
    consensus_constants: ConsensusConstants,
    b_tools: BlockTools,
    vdf_port: uint16 = uint16(0),
    yield_service: bool = False,
):
    """Start a timelord connected to the given full node.

    ``sanitizer`` selects bluebox (proof-compacting) mode.  Yields the Service
    when ``yield_service`` is True, otherwise (api, server); teardown stops it.
    """
    config = b_tools.config
    service_config = config["timelord"]
    service_config["full_node_peer"]["port"] = full_node_port
    service_config["bluebox_mode"] = sanitizer
    service_config["fast_algorithm"] = False
    service_config["vdf_server"]["port"] = vdf_port
    service_config["start_rpc_server"] = True
    service_config["rpc_port"] = uint16(0)
    service = create_timelord_service(
        b_tools.root_path,
        config,
        consensus_constants,
        connect_to_daemon=False,
    )
    await service.start()
    if yield_service:
        yield service
    else:
        yield service._api, service._node.server
    service.stop()
    await service.wait_closed()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/conftest.py | tests/conftest.py | # flake8: noqa E402 # See imports after multiprocessing.set_start_method
import aiohttp
import multiprocessing
import os
from typing import Any, AsyncIterator, Dict, List, Tuple, Union
import pytest
import pytest_asyncio
import tempfile
from flax.full_node.full_node_api import FullNodeAPI
from flax.server.server import FlaxServer
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.wallet.wallet import Wallet
from typing import Any, AsyncIterator, Dict, List, Tuple, Union
from flax.server.start_service import Service
# Set spawn after stdlib imports, but before other imports
from flax.clvm.spend_sim import SimClient, SpendSim
from flax.full_node.full_node_api import FullNodeAPI
from flax.protocols import full_node_protocol
from flax.server.server import FlaxServer
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.types.peer_info import PeerInfo
from flax.util.config import create_default_flax_config, lock_and_load_config
from flax.util.ints import uint16
from flax.wallet.wallet import Wallet
from tests.core.node_height import node_height_at_least
from tests.setup_nodes import (
setup_simulators_and_wallets,
setup_node_and_wallet,
setup_full_system,
setup_daemon,
setup_n_nodes,
setup_introducer,
setup_timelord,
setup_two_nodes,
)
from tests.simulation.test_simulation import test_constants_modified
from flax.simulator.time_out_assert import time_out_assert
from flax.simulator.wallet_tools import WalletTool
from tests.util.wallet_is_synced import wallet_is_synced
multiprocessing.set_start_method("spawn")
from pathlib import Path
from flax.util.keyring_wrapper import KeyringWrapper
from flax.simulator.block_tools import BlockTools, test_constants, create_block_tools, create_block_tools_async
from tests.util.keyring import TempKeyring
from tests.setup_nodes import setup_farmer_multi_harvester
@pytest.fixture(scope="session")
def get_keychain():
with TempKeyring() as keychain:
yield keychain
KeyringWrapper.cleanup_shared_instance()
@pytest.fixture(scope="session", name="bt")
def block_tools_fixture(get_keychain) -> BlockTools:
# Note that this causes a lot of CPU and disk traffic - disk, DB, ports, process creation ...
_shared_block_tools = create_block_tools(constants=test_constants, keychain=get_keychain)
return _shared_block_tools
# if you have a system that has an unusual hostname for localhost and you want
# to run the tests, change the `self_hostname` fixture
# Fix: this is a plain synchronous fixture, so it must use pytest.fixture;
# pytest_asyncio.fixture is meant for async fixture functions.
@pytest.fixture(scope="session")
def self_hostname():
    """Hostname every test node/service uses to reach its peers."""
    return "127.0.0.1"
# NOTE:
# Instantiating the bt fixture results in an attempt to create the flax root directory
# which the build scripts symlink to a sometimes-not-there directory.
# When not there, Python complains since, well, the symlink is not a directory nor points to a directory.
#
# Now that we have removed the global at tests.setup_nodes.bt, we can move the imports out of
# the fixtures below. Just be aware of the filesystem modification during bt fixture creation
@pytest_asyncio.fixture(scope="function", params=[1, 2])
async def empty_blockchain(request):
    """
    Provides a list of 10 valid blocks, as well as a blockchain with 9 blocks added to it.
    """
    from tests.util.blockchain import create_blockchain
    from tests.setup_nodes import test_constants

    # request.param is the DB schema version (1 or 2) for the blockchain DB.
    bc1, db_wrapper, db_path = await create_blockchain(test_constants, request.param)
    yield bc1
    # Teardown: close the DB, stop the blockchain, and remove the DB file.
    await db_wrapper.close()
    bc1.shut_down()
    db_path.unlink()
@pytest.fixture(scope="function")
def latest_db_version():
return 2
@pytest.fixture(scope="function", params=[1, 2])
def db_version(request):
return request.param
saved_blocks_version = "rc5"
@pytest.fixture(scope="session")
def default_400_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(400, f"test_blocks_400_{saved_blocks_version}.db", bt, seed=b"400")
@pytest.fixture(scope="session")
def default_1000_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(1000, f"test_blocks_1000_{saved_blocks_version}.db", bt, seed=b"1000")
@pytest.fixture(scope="session")
def pre_genesis_empty_slots_1000_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
1000,
f"pre_genesis_empty_slots_1000_blocks{saved_blocks_version}.db",
bt,
seed=b"empty_slots",
empty_sub_slots=1,
)
@pytest.fixture(scope="session")
def default_1500_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(1500, f"test_blocks_1500_{saved_blocks_version}.db", bt, seed=b"1500")
@pytest.fixture(scope="session")
def default_10000_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(10000, f"test_blocks_10000_{saved_blocks_version}.db", bt, seed=b"10000")
@pytest.fixture(scope="session")
def default_20000_blocks(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(20000, f"test_blocks_20000_{saved_blocks_version}.db", bt, seed=b"20000")
@pytest.fixture(scope="session")
def test_long_reorg_blocks(bt, default_1500_blocks):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
758,
f"test_blocks_long_reorg_{saved_blocks_version}.db",
bt,
block_list_input=default_1500_blocks[:320],
seed=b"reorg_blocks",
time_per_block=8,
)
@pytest.fixture(scope="session")
def default_2000_blocks_compact(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
2000,
f"test_blocks_2000_compact_{saved_blocks_version}.db",
bt,
normalized_to_identity_cc_eos=True,
normalized_to_identity_icc_eos=True,
normalized_to_identity_cc_ip=True,
normalized_to_identity_cc_sp=True,
seed=b"2000_compact",
)
@pytest.fixture(scope="session")
def default_10000_blocks_compact(bt):
from tests.util.blockchain import persistent_blocks
return persistent_blocks(
10000,
f"test_blocks_10000_compact_{saved_blocks_version}.db",
bt,
normalized_to_identity_cc_eos=True,
normalized_to_identity_icc_eos=True,
normalized_to_identity_cc_ip=True,
normalized_to_identity_cc_sp=True,
seed=b"1000_compact",
)
@pytest.fixture(scope="function")
def tmp_dir():
with tempfile.TemporaryDirectory() as folder:
yield Path(folder)
# For the below see https://stackoverflow.com/a/62563106/15133773
# When the _PYTEST_RAISE env var is set (non-"0"), re-raise exceptions instead
# of letting pytest capture them, so an attached debugger can break on them.
if os.getenv("_PYTEST_RAISE", "0") != "0":

    @pytest.hookimpl(tryfirst=True)
    def pytest_exception_interact(call):
        raise call.excinfo.value

    @pytest.hookimpl(tryfirst=True)
    def pytest_internalerror(excinfo):
        raise excinfo.value
@pytest_asyncio.fixture(scope="function")
async def wallet_node(self_hostname, request):
params = {}
if request and request.param_index > 0:
params = request.param
async for _ in setup_node_and_wallet(test_constants, self_hostname, **params):
yield _
@pytest_asyncio.fixture(scope="function")
async def node_with_params(request):
params = {}
if request:
params = request.param
async for (sims, wallets, bt) in setup_simulators_and_wallets(1, 0, {}, **params):
yield sims[0]
@pytest_asyncio.fixture(scope="function")
async def two_nodes(db_version, self_hostname):
async for _ in setup_two_nodes(test_constants, db_version=db_version, self_hostname=self_hostname):
yield _
@pytest_asyncio.fixture(scope="function")
async def setup_two_nodes_fixture(db_version):
async for _ in setup_simulators_and_wallets(2, 0, {}, db_version=db_version):
yield _
@pytest_asyncio.fixture(scope="function")
async def three_nodes(db_version, self_hostname):
async for _ in setup_n_nodes(test_constants, 3, db_version=db_version, self_hostname=self_hostname):
yield _
@pytest_asyncio.fixture(scope="function")
async def four_nodes(db_version, self_hostname):
async for _ in setup_n_nodes(test_constants, 4, db_version=db_version, self_hostname=self_hostname):
yield _
@pytest_asyncio.fixture(scope="function")
async def five_nodes(db_version, self_hostname):
async for _ in setup_n_nodes(test_constants, 5, db_version=db_version, self_hostname=self_hostname):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_nodes():
async_gen = setup_simulators_and_wallets(2, 1, {"MEMPOOL_BLOCK_BUFFER": 1, "MAX_BLOCK_COST_CLVM": 400000000})
nodes, wallets, bt = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
wallet_a = bt.get_pool_wallet_tool()
wallet_receiver = WalletTool(full_node_1.full_node.constants)
yield full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver, bt
async for _ in async_gen:
yield _
@pytest_asyncio.fixture(scope="function")
async def setup_four_nodes(db_version):
async for _ in setup_simulators_and_wallets(5, 0, {}, db_version=db_version):
yield _
@pytest_asyncio.fixture(scope="function")
async def two_nodes_sim_and_wallets():
async for _ in setup_simulators_and_wallets(2, 0, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def two_nodes_sim_and_wallets_services():
async for _ in setup_simulators_and_wallets(2, 0, {}, yield_services=True):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_node_sim_and_wallet() -> AsyncIterator[
Tuple[List[Union[FullNodeAPI, FullNodeSimulator]], List[Tuple[Wallet, FlaxServer]], BlockTools],
]:
async for _ in setup_simulators_and_wallets(1, 1, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def one_wallet_and_one_simulator_services():
async for _ in setup_simulators_and_wallets(1, 1, {}, yield_services=True):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_node_100_pk():
async for _ in setup_simulators_and_wallets(1, 1, {}, initial_num_public_keys=100):
yield _
@pytest_asyncio.fixture(scope="function")
async def two_wallet_nodes(request):
params = {}
if request and request.param_index > 0:
params = request.param
async for _ in setup_simulators_and_wallets(1, 2, {}, **params):
yield _
@pytest_asyncio.fixture(scope="function")
async def two_wallet_nodes_services() -> AsyncIterator[Tuple[List[Service], List[FullNodeSimulator], BlockTools]]:
async for _ in setup_simulators_and_wallets(1, 2, {}, yield_services=True):
yield _
@pytest_asyncio.fixture(scope="function")
async def two_wallet_nodes_custom_spam_filtering(spam_filter_after_n_txs, xfx_spam_amount):
async for _ in setup_simulators_and_wallets(1, 2, {}, spam_filter_after_n_txs, xfx_spam_amount):
yield _
@pytest_asyncio.fixture(scope="function")
async def three_sim_two_wallets():
async for _ in setup_simulators_and_wallets(3, 2, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def setup_two_nodes_and_wallet():
async for _ in setup_simulators_and_wallets(2, 1, {}, db_version=2):
yield _
@pytest_asyncio.fixture(scope="function")
async def setup_two_nodes_and_wallet_fast_retry():
async for _ in setup_simulators_and_wallets(
1, 1, {}, config_overrides={"wallet.tx_resend_timeout_secs": 1}, db_version=2
):
yield _
@pytest_asyncio.fixture(scope="function")
async def three_wallet_nodes():
async for _ in setup_simulators_and_wallets(1, 3, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def two_wallet_nodes_five_freeze():
async for _ in setup_simulators_and_wallets(1, 2, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_node_simulator():
async for _ in setup_simulators_and_wallets(1, 1, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_two_node_simulator():
async for _ in setup_simulators_and_wallets(2, 1, {}):
yield _
@pytest_asyncio.fixture(scope="module")
async def wallet_nodes_mempool_perf(bt):
key_seed = bt.farmer_master_sk_entropy
async for _ in setup_simulators_and_wallets(2, 1, {}, key_seed=key_seed):
yield _
@pytest_asyncio.fixture(scope="module")
async def wallet_nodes_perf():
async_gen = setup_simulators_and_wallets(1, 1, {"MEMPOOL_BLOCK_BUFFER": 1, "MAX_BLOCK_COST_CLVM": 11000000000})
nodes, wallets, bt = await async_gen.__anext__()
full_node_1 = nodes[0]
server_1 = full_node_1.full_node.server
wallet_a = bt.get_pool_wallet_tool()
wallet_receiver = WalletTool(full_node_1.full_node.constants)
yield full_node_1, server_1, wallet_a, wallet_receiver, bt
async for _ in async_gen:
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_nodes_mainnet(db_version):
async_gen = setup_simulators_and_wallets(2, 1, {}, db_version=db_version)
nodes, wallets, bt = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
wallet_a = bt.get_pool_wallet_tool()
wallet_receiver = WalletTool(full_node_1.full_node.constants)
yield full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver, bt
async for _ in async_gen:
yield _
@pytest_asyncio.fixture(scope="function")
async def three_nodes_two_wallets():
async for _ in setup_simulators_and_wallets(3, 2, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_and_node():
async for _ in setup_simulators_and_wallets(1, 1, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def one_node_one_block() -> AsyncIterator[Tuple[Union[FullNodeAPI, FullNodeSimulator], FlaxServer, BlockTools]]:
async_gen = setup_simulators_and_wallets(1, 0, {})
nodes, _, bt = await async_gen.__anext__()
full_node_1 = nodes[0]
server_1 = full_node_1.full_node.server
wallet_a = bt.get_pool_wallet_tool()
reward_ph = wallet_a.get_new_puzzlehash()
blocks = bt.get_consecutive_blocks(
1,
guarantee_transaction_block=True,
farmer_reward_puzzle_hash=reward_ph,
pool_reward_puzzle_hash=reward_ph,
genesis_timestamp=10000,
time_per_block=10,
)
assert blocks[0].height == 0
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
await time_out_assert(60, node_height_at_least, True, full_node_1, blocks[-1].height)
yield full_node_1, server_1, bt
async for _ in async_gen:
yield _
@pytest_asyncio.fixture(scope="function")
async def two_nodes_one_block():
async_gen = setup_simulators_and_wallets(2, 0, {})
nodes, _, bt = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
wallet_a = bt.get_pool_wallet_tool()
reward_ph = wallet_a.get_new_puzzlehash()
blocks = bt.get_consecutive_blocks(
1,
guarantee_transaction_block=True,
farmer_reward_puzzle_hash=reward_ph,
pool_reward_puzzle_hash=reward_ph,
genesis_timestamp=10000,
time_per_block=10,
)
assert blocks[0].height == 0
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
await time_out_assert(60, node_height_at_least, True, full_node_1, blocks[-1].height)
yield full_node_1, full_node_2, server_1, server_2, bt
async for _ in async_gen:
yield _
@pytest_asyncio.fixture(scope="function")
async def farmer_one_harvester(tmp_path: Path, bt: BlockTools) -> AsyncIterator[Tuple[List[Service], Service]]:
async for _ in setup_farmer_multi_harvester(bt, 1, tmp_path, test_constants, start_services=True):
yield _
@pytest_asyncio.fixture(scope="function")
async def farmer_one_harvester_not_started(
tmp_path: Path, bt: BlockTools
) -> AsyncIterator[Tuple[List[Service], Service]]:
async for _ in setup_farmer_multi_harvester(bt, 1, tmp_path, test_constants, start_services=False):
yield _
@pytest_asyncio.fixture(scope="function")
async def farmer_two_harvester_not_started(
tmp_path: Path, bt: BlockTools
) -> AsyncIterator[Tuple[List[Service], Service]]:
async for _ in setup_farmer_multi_harvester(bt, 2, tmp_path, test_constants, start_services=False):
yield _
@pytest_asyncio.fixture(scope="function")
async def farmer_three_harvester_not_started(
tmp_path: Path, bt: BlockTools
) -> AsyncIterator[Tuple[List[Service], Service]]:
async for _ in setup_farmer_multi_harvester(bt, 3, tmp_path, test_constants, start_services=False):
yield _
# TODO: Ideally, the db_version should be the (parameterized) db_version
# fixture, to test all versions of the database schema. This doesn't work
# because of a hack in shutting down the full node, which means you cannot run
# more than one simulations per process.
@pytest_asyncio.fixture(scope="function")
async def daemon_simulation(bt, get_b_tools, get_b_tools_1):
    # Full system (nodes, farmer, harvester, timelords, introducer, daemon).
    async for _ in setup_full_system(
        test_constants_modified,
        bt,
        b_tools=get_b_tools,
        b_tools_1=get_b_tools_1,
        connect_to_daemon=True,
        db_version=1,
    ):
        yield _, get_b_tools, get_b_tools_1


@pytest_asyncio.fixture(scope="function")
async def get_daemon(bt):
    async for _ in setup_daemon(btools=bt):
        yield _
@pytest_asyncio.fixture(scope="function")
async def get_temp_keyring():
with TempKeyring() as keychain:
yield keychain
@pytest_asyncio.fixture(scope="function")
async def get_b_tools_1(get_temp_keyring):
return await create_block_tools_async(constants=test_constants_modified, keychain=get_temp_keyring)
@pytest_asyncio.fixture(scope="function")
async def get_b_tools(get_temp_keyring):
local_b_tools = await create_block_tools_async(constants=test_constants_modified, keychain=get_temp_keyring)
new_config = local_b_tools._config
local_b_tools.change_config(new_config)
return local_b_tools
@pytest_asyncio.fixture(scope="function")
async def daemon_connection_and_temp_keychain(get_b_tools):
async for daemon in setup_daemon(btools=get_b_tools):
keychain = daemon.keychain_server._default_keychain
async with aiohttp.ClientSession() as session:
async with session.ws_connect(
f"wss://127.0.0.1:{get_b_tools._config['daemon_port']}",
autoclose=True,
autoping=True,
heartbeat=60,
ssl=get_b_tools.get_daemon_ssl_context(),
max_msg_size=52428800,
) as ws:
yield ws, keychain
@pytest_asyncio.fixture(scope="function")
async def wallets_prefarm(two_wallet_nodes, self_hostname, trusted):
"""
Sets up the node with 10 blocks, and returns a payer and payee wallet.
"""
farm_blocks = 3
buffer = 1
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
full_node_server = full_node_api.server
wallet_node_0, wallet_server_0 = wallets[0]
wallet_node_1, wallet_server_1 = wallets[1]
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
if trusted:
wallet_node_0.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
wallet_node_1.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
else:
wallet_node_0.config["trusted_peers"] = {}
wallet_node_1.config["trusted_peers"] = {}
await wallet_server_0.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
await wallet_server_1.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
wallet_0_rewards = await full_node_api.farm_blocks(count=farm_blocks, wallet=wallet_0)
wallet_1_rewards = await full_node_api.farm_blocks(count=farm_blocks, wallet=wallet_1)
await full_node_api.process_blocks(count=buffer)
await time_out_assert(30, wallet_is_synced, True, wallet_node_0, full_node_api)
await time_out_assert(30, wallet_is_synced, True, wallet_node_1, full_node_api)
assert await wallet_0.get_confirmed_balance() == wallet_0_rewards
assert await wallet_0.get_unconfirmed_balance() == wallet_0_rewards
assert await wallet_1.get_confirmed_balance() == wallet_1_rewards
assert await wallet_1.get_unconfirmed_balance() == wallet_1_rewards
return (wallet_node_0, wallet_0_rewards), (wallet_node_1, wallet_1_rewards), full_node_api
@pytest_asyncio.fixture(scope="function")
async def introducer(bt):
async for _ in setup_introducer(bt, 0):
yield _
@pytest_asyncio.fixture(scope="function")
async def introducer_service(bt):
async for _ in setup_introducer(bt, 0, yield_service=True):
yield _
@pytest_asyncio.fixture(scope="function")
async def timelord(bt):
async for _ in setup_timelord(uint16(0), False, test_constants, bt):
yield _
@pytest_asyncio.fixture(scope="function")
async def timelord_service(bt):
async for _ in setup_timelord(uint16(0), False, test_constants, bt, yield_service=True):
yield _
@pytest_asyncio.fixture(scope="function")
async def setup_sim():
sim = await SpendSim.create()
sim_client = SimClient(sim)
await sim.farm_block()
return sim, sim_client
@pytest.fixture(scope="function")
def tmp_flax_root(tmp_path):
"""
Create a temp directory and populate it with an empty flax_root directory.
"""
path: Path = tmp_path / "flax_root"
path.mkdir(parents=True, exist_ok=True)
return path
@pytest.fixture(scope="function")
def root_path_populated_with_config(tmp_flax_root) -> Path:
"""
Create a temp flax_root directory and populate it with a default config.yaml.
Returns the flax_root path.
"""
root_path: Path = tmp_flax_root
create_default_flax_config(root_path)
return root_path
@pytest.fixture(scope="function")
def config_with_address_prefix(root_path_populated_with_config: Path, prefix: str) -> Dict[str, Any]:
with lock_and_load_config(root_path_populated_with_config, "config.yaml") as config:
if prefix is not None:
config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] = prefix
return config
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/__init__.py | tests/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/build-init-files.py | tests/build-init-files.py | #!/usr/bin/env python3
# Create missing `__init__.py` files in the source code folders (in "flax/" and "tests/").
#
# They are required by the python interpreter to properly identify modules/packages so that tools like `mypy` or an IDE
# can work with their full capabilities.
#
# See https://docs.python.org/3/tutorial/modules.html#packages.
#
# Note: This script is run in a `pre-commit` hook (which runs on CI) to make sure we don't miss out any folder.
import logging
import pathlib
import sys
import click
# Map of -v count to log level; anything above the max enables INFO.
log_levels = {
    0: logging.ERROR,
    1: logging.WARNING,
    2: logging.INFO,
}

# Directory names that should never receive an __init__.py.
ignores = {"__pycache__", ".pytest_cache"}


@click.command()
@click.option(
    "-r", "--root", "root_str", type=click.Path(dir_okay=True, file_okay=False, resolve_path=True), default="."
)
@click.option("-v", "--verbose", count=True, help=f"Increase verbosity up to {len(log_levels) - 1} times")
def command(verbose, root_str):
    """Create missing __init__.py files under the source tree roots.

    Raises click.ClickException (non-zero exit) when any file had to be
    created, or when a path named __init__.py exists but is not a regular
    non-symlink file.
    """
    logger = logging.getLogger()
    log_level = log_levels.get(verbose, min(log_levels.values()))
    logger.setLevel(log_level)
    stream_handler = logging.StreamHandler()
    logger.addHandler(stream_handler)

    tree_roots = ["benchmarks", "build_scripts", "flax", "tests", "tools"]
    failed = False
    root = pathlib.Path(root_str).resolve()
    # Every directory under the tree roots, minus cache folders.
    directories = sorted(
        path
        for tree_root in tree_roots
        for path in root.joinpath(tree_root).rglob("**/")
        if all(part not in ignores for part in path.parts)
    )
    for path in directories:
        init_path = path.joinpath("__init__.py")
        # This has plenty of race hazards. If it messes up,
        # it will likely get caught the next time.
        if init_path.is_file() and not init_path.is_symlink():
            logger.info(f"Found  : {init_path}")
            continue
        elif not init_path.exists():
            failed = True
            init_path.touch()
            logger.warning(f"Created : {init_path}")
        else:
            failed = True
            # Bug fix: Logger.error() does not accept print()'s `file=` keyword;
            # passing file=sys.stderr raised a TypeError whenever this branch ran.
            logger.error(f"Fail : present but not a regular file: {init_path}")

    if failed:
        raise click.ClickException("At least one __init__.py created or not a regular file")


command()  # pylint: disable=no-value-for-parameter
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/setup_nodes.py | tests/setup_nodes.py | import asyncio
import logging
from typing import AsyncIterator, Dict, List, Tuple, Optional, Union
from pathlib import Path
from flax.consensus.constants import ConsensusConstants
from flax.full_node.full_node import FullNode
from flax.full_node.full_node_api import FullNodeAPI
from flax.protocols.shared_protocol import Capability
from flax.server.server import FlaxServer
from flax.server.start_data_layer import create_data_layer_service
from flax.server.start_service import Service
from flax.simulator.block_tools import BlockTools, create_block_tools_async, test_constants
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.types.peer_info import PeerInfo
from flax.util.hash import std_hash
from flax.util.ints import uint16, uint32
from flax.wallet.wallet_node import WalletNode
from tests.setup_services import (
setup_daemon,
setup_farmer,
setup_full_node,
setup_harvester,
setup_introducer,
setup_timelord,
setup_vdf_client,
setup_vdf_clients,
setup_wallet_node,
)
from flax.simulator.time_out_assert import time_out_assert_custom_interval
from tests.util.keyring import TempKeyring
from flax.simulator.socket import find_available_listen_port
# Shapes of the tuples yielded by setup_simulators_and_wallets (API mode vs
# yield_services mode).
SimulatorsAndWallets = Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools]
SimulatorsAndWalletsServices = Tuple[List[Service[FullNode]], List[Service[WalletNode]], BlockTools]


def cleanup_keyring(keyring: TempKeyring):
    # Thin wrapper around TempKeyring.cleanup().
    keyring.cleanup()


log = logging.getLogger(__name__)


def constants_for_dic(dic):
    # Consensus constants: test_constants overridden by the entries of `dic`.
    return test_constants.replace(**dic)
async def _teardown_nodes(node_aiters: List) -> None:
awaitables = [node_iter.__anext__() for node_iter in node_aiters]
for sublist_awaitable in asyncio.as_completed(awaitables):
try:
await sublist_awaitable
except StopAsyncIteration:
pass
async def setup_data_layer(local_bt):
    # Start a DataLayer service from the given BlockTools' config, yield its
    # API, then stop the service on teardown.
    # db_path = local_bt.root_path / f"{db_name}"
    # if db_path.exists():
    #     db_path.unlink()
    config = local_bt.config["data_layer"]
    # config["database_path"] = db_name
    # if introducer_port is not None:
    #     config["introducer_peer"]["host"] = self_hostname
    #     config["introducer_peer"]["port"] = introducer_port
    # else:
    #     config["introducer_peer"] = None
    # config["dns_servers"] = []
    # config["rpc_port"] = port + 1000
    # overrides = config["network_overrides"]["constants"][config["selected_network"]]
    # updated_constants = consensus_constants.replace_str_to_bytes(**overrides)
    # if simulator:
    #     kwargs = service_kwargs_for_full_node_simulator(local_bt.root_path, config, local_bt)
    # else:
    #     kwargs = service_kwargs_for_full_node(local_bt.root_path, config, updated_constants)
    service = create_data_layer_service(local_bt.root_path, config, connect_to_daemon=False)
    await service.start()
    yield service._api
    service.stop()
    await service.wait_closed()
async def setup_two_nodes(consensus_constants: ConsensusConstants, db_version: int, self_hostname: str):
    """
    Setup and teardown of two full nodes, with blockchains and separate DBs.
    Yields (node_api_1, node_api_2, server_1, server_2, block_tools_1).
    """
    # Each node gets its own temporary keychain and BlockTools instance.
    with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2:
        bt1 = await create_block_tools_async(constants=test_constants, keychain=keychain1)
        node_iters = [
            setup_full_node(
                consensus_constants,
                "blockchain_test.db",
                self_hostname,
                bt1,
                simulator=False,
                db_version=db_version,
            ),
            setup_full_node(
                consensus_constants,
                "blockchain_test_2.db",
                self_hostname,
                await create_block_tools_async(constants=test_constants, keychain=keychain2),
                simulator=False,
                db_version=db_version,
            ),
        ]

        fn1 = await node_iters[0].__anext__()
        fn2 = await node_iters[1].__anext__()

        yield fn1, fn2, fn1.full_node.server, fn2.full_node.server, bt1

        await _teardown_nodes(node_iters)
async def setup_n_nodes(consensus_constants: ConsensusConstants, n: int, db_version: int, self_hostname: str):
    """
    Setup and teardown of n full nodes, with blockchains and separate DBs.
    Yields the list of node APIs.
    """
    node_iters = []
    keyrings_to_cleanup = []
    for i in range(n):
        # One temporary keychain (and BlockTools) per node.
        keyring = TempKeyring(populate=True)
        keyrings_to_cleanup.append(keyring)
        node_iters.append(
            setup_full_node(
                consensus_constants,
                f"blockchain_test_{i}.db",
                self_hostname,
                await create_block_tools_async(constants=test_constants, keychain=keyring.get_keychain()),
                simulator=False,
                db_version=db_version,
            )
        )
    nodes = []
    for ni in node_iters:
        nodes.append(await ni.__anext__())

    yield nodes

    await _teardown_nodes(node_iters)
    for keyring in keyrings_to_cleanup:
        keyring.cleanup()
async def setup_node_and_wallet(
    consensus_constants: ConsensusConstants,
    self_hostname: str,
    key_seed=None,
    db_version=1,
    disable_capabilities=None,
):
    """One full node plus one wallet node sharing a BlockTools instance.

    Yields (full_node_api, wallet_node, full_node_server, wallet_server, btools).
    """
    with TempKeyring(populate=True) as keychain:
        btools = await create_block_tools_async(constants=test_constants, keychain=keychain)
        node_iters = [
            setup_full_node(
                consensus_constants,
                "blockchain_test.db",
                self_hostname,
                btools,
                simulator=False,
                db_version=db_version,
                disable_capabilities=disable_capabilities,
            ),
            setup_wallet_node(
                btools.config["self_hostname"],
                consensus_constants,
                btools,
                None,
                key_seed=key_seed,
            ),
        ]

        full_node_api = await node_iters[0].__anext__()
        wallet, s2 = await node_iters[1].__anext__()

        yield full_node_api, wallet, full_node_api.full_node.server, s2, btools

        await _teardown_nodes(node_iters)
async def setup_simulators_and_wallets(
    simulator_count: int,
    wallet_count: int,
    dic: Dict,
    spam_filter_after_n_txs=200,
    xfx_spam_amount=1000000,
    *,
    key_seed=None,
    initial_num_public_keys=5,
    db_version=1,
    config_overrides: Optional[Dict] = None,
    disable_capabilities: Optional[List[Capability]] = None,
    yield_services: bool = False,
):
    """Start `simulator_count` full-node simulators and `wallet_count` wallets.

    dic: consensus-constant overrides applied on top of test_constants.
    key_seed: wallet key seed; when None each wallet gets std_hash(index).
    yield_services: yield Service objects instead of API/server pairs.
    Yields (simulators, wallets, bt_tools[0]), then tears everything down.
    """
    with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2:
        simulators: List[Union[FullNodeAPI, Service]] = []
        wallets = []
        node_iters = []
        bt_tools: List[BlockTools] = []
        consensus_constants = constants_for_dic(dic)
        for index in range(0, simulator_count):
            db_name = f"blockchain_test_{index}_sim_and_wallets.db"
            bt_tools.append(
                await create_block_tools_async(
                    consensus_constants, const_dict=dic, keychain=keychain1, config_overrides=config_overrides
                )
            )  # block tools modifies constants
            # NOTE(review): the positional order here (constants, self_hostname,
            # db_name) differs from setup_two_nodes/setup_node_and_wallet, which
            # pass (constants, db_name, self_hostname) — confirm against
            # setup_full_node's signature.
            sim = setup_full_node(
                bt_tools[index].constants,
                bt_tools[index].config["self_hostname"],
                db_name,
                bt_tools[index],
                simulator=True,
                db_version=db_version,
                disable_capabilities=disable_capabilities,
                yield_service=yield_services,
            )
            simulators.append(await sim.__anext__())
            node_iters.append(sim)
        for index in range(0, wallet_count):
            if key_seed is None:
                seed = std_hash(uint32(index))
            else:
                seed = key_seed
            # Reuse a simulator's BlockTools when one exists for this index;
            # otherwise build a fresh one on the second keychain.
            if index > (len(bt_tools) - 1):
                wallet_bt_tools = await create_block_tools_async(
                    consensus_constants, const_dict=dic, keychain=keychain2, config_overrides=config_overrides
                )  # block tools modifies constants
            else:
                wallet_bt_tools = bt_tools[index]
            wlt = setup_wallet_node(
                wallet_bt_tools.config["self_hostname"],
                wallet_bt_tools.constants,
                wallet_bt_tools,
                spam_filter_after_n_txs,
                xfx_spam_amount,
                None,
                key_seed=seed,
                initial_num_public_keys=initial_num_public_keys,
                yield_service=yield_services,
            )
            wallets.append(await wlt.__anext__())
            node_iters.append(wlt)
        yield simulators, wallets, bt_tools[0]
        await _teardown_nodes(node_iters)
async def setup_farmer_multi_harvester(
    block_tools: BlockTools,
    harvester_count: int,
    temp_dir: Path,
    consensus_constants: ConsensusConstants,
    *,
    start_services: bool,
) -> AsyncIterator[Tuple[List[Service], Service, BlockTools]]:
    """One farmer plus `harvester_count` harvesters, each with its own root dir.

    start_services: when False, services are constructed but not started, and
    the harvesters get no farmer peer to connect to.
    Yields (harvester_services, farmer_service, block_tools).
    """
    node_iterators = [
        setup_farmer(
            block_tools,
            temp_dir / "farmer",
            block_tools.config["self_hostname"],
            consensus_constants,
            port=uint16(0),
            start_service=start_services,
        )
    ]
    farmer_service = await node_iterators[0].__anext__()
    # The farmer's port is only known once it has actually started.
    if start_services:
        farmer_peer = PeerInfo(block_tools.config["self_hostname"], farmer_service._server._port)
    else:
        farmer_peer = None
    for i in range(0, harvester_count):
        root_path: Path = temp_dir / f"harvester_{i}"
        node_iterators.append(
            setup_harvester(
                block_tools,
                root_path,
                farmer_peer,
                consensus_constants,
                start_service=start_services,
            )
        )
    harvester_services = []
    for node in node_iterators[1:]:
        harvester_service = await node.__anext__()
        harvester_services.append(harvester_service)

    yield harvester_services, farmer_service, block_tools

    # Teardown: stop harvesters first, then the farmer, then drain iterators.
    for harvester_service in harvester_services:
        harvester_service.stop()
        await harvester_service.wait_closed()

    farmer_service.stop()
    await farmer_service.wait_closed()

    await _teardown_nodes(node_iterators)
async def setup_full_system(
consensus_constants: ConsensusConstants,
shared_b_tools: BlockTools,
b_tools: BlockTools = None,
b_tools_1: BlockTools = None,
db_version=1,
connect_to_daemon=False,
):
with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2:
if b_tools is None:
b_tools = await create_block_tools_async(constants=test_constants, keychain=keychain1)
if b_tools_1 is None:
b_tools_1 = await create_block_tools_async(constants=test_constants, keychain=keychain2)
if connect_to_daemon:
daemon_iter = setup_daemon(btools=b_tools)
daemon_ws = await daemon_iter.__anext__()
# Start the introducer first so we can find out the port, and use that for the nodes
introducer_iter = setup_introducer(shared_b_tools, uint16(0))
introducer, introducer_server = await introducer_iter.__anext__()
# Then start the full node so we can use the port for the farmer and timelord
full_node_iters = [
setup_full_node(
consensus_constants,
f"blockchain_test_{i}.db",
shared_b_tools.config["self_hostname"],
b_tools if i == 0 else b_tools_1,
introducer_server._port,
False,
10,
True,
connect_to_daemon=connect_to_daemon,
db_version=db_version,
)
for i in range(2)
]
node_apis = [await fni.__anext__() for fni in full_node_iters]
full_node_0_port = node_apis[0].full_node.server.get_port()
farmer_iter = setup_farmer(
shared_b_tools,
shared_b_tools.root_path / "harvester",
shared_b_tools.config["self_hostname"],
consensus_constants,
full_node_0_port,
)
farmer_service = await farmer_iter.__anext__()
harvester_iter = setup_harvester(
shared_b_tools,
shared_b_tools.root_path / "harvester",
PeerInfo(shared_b_tools.config["self_hostname"], farmer_service._server.get_port()),
consensus_constants,
)
vdf1_port = uint16(find_available_listen_port("vdf1"))
vdf2_port = uint16(find_available_listen_port("vdf2"))
timelord_iter = setup_timelord(full_node_0_port, False, consensus_constants, b_tools, vdf_port=vdf1_port)
timelord_bluebox_iter = setup_timelord(1000, True, consensus_constants, b_tools_1, vdf_port=vdf2_port)
harvester_service = await harvester_iter.__anext__()
harvester = harvester_service._node
async def num_connections():
count = len(harvester.server.all_connections.items())
return count
await time_out_assert_custom_interval(10, 3, num_connections, 1)
node_iters = [
introducer_iter,
harvester_iter,
farmer_iter,
setup_vdf_clients(shared_b_tools, shared_b_tools.config["self_hostname"], vdf1_port),
timelord_iter,
full_node_iters[0],
full_node_iters[1],
setup_vdf_client(shared_b_tools, shared_b_tools.config["self_hostname"], vdf2_port),
timelord_bluebox_iter,
]
if connect_to_daemon:
node_iters.append(daemon_iter)
timelord, _ = await timelord_iter.__anext__()
vdf_clients = await node_iters[3].__anext__()
timelord_bluebox, timelord_bluebox_server = await timelord_bluebox_iter.__anext__()
vdf_bluebox_clients = await node_iters[7].__anext__()
ret = (
node_apis[0],
node_apis[1],
harvester,
farmer_service._node,
introducer,
timelord,
vdf_clients,
vdf_bluebox_clients,
timelord_bluebox,
timelord_bluebox_server,
)
if connect_to_daemon:
yield ret + (daemon_ws,)
else:
yield ret
if connect_to_daemon:
await _teardown_nodes(node_iters[:-1])
await _teardown_nodes([node_iters[-1]])
else:
await _teardown_nodes(node_iters)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/check_pytest_monitor_output.py | tests/check_pytest_monitor_output.py | #!/usr/bin/env python3
from __future__ import annotations
import sys
ret = 0
# example input line
# test_non_tx_aggregate_limits 0.997759588095738 1.45325589179993 554.45703125
for ln in sys.stdin:
line = ln.strip().split()
print(f"{float(line[1]) * 100.0: 8.1f}% CPU {float(line[2]):7.1f}s {float(line[3]): 8.2f} MB RAM {line[0]}")
limit = 800
# until this can be optimized, use higher limits
if "test_duplicate_coin_announces" in line[0]:
limit = 2200
elif (
"test_duplicate_large_integer_substr" in line[0]
or "test_duplicate_reserve_fee" in line[0]
or "test_duplicate_large_integer_negative" in line[0]
or "test_duplicate_large_integer" in line[0]
):
limit = 1100
if float(line[3]) > limit:
print(" ERROR: ^^ exceeded RAM limit ^^ \n")
ret += 1
if ret > 0:
print("some tests used too much RAM")
sys.exit(ret)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/blockchain/test_blockchain.py | tests/blockchain/test_blockchain.py | import logging
import multiprocessing
import time
from dataclasses import replace
from secrets import token_bytes
from typing import List
import pytest
from blspy import AugSchemeMPL, G2Element
from clvm.casts import int_to_bytes
from flax.consensus.block_header_validation import validate_finished_header_block
from flax.consensus.block_rewards import calculate_base_farmer_reward
from flax.consensus.blockchain import ReceiveBlockResult
from flax.consensus.coinbase import create_farmer_coin
from flax.consensus.multiprocess_validation import PreValidationResult
from flax.consensus.pot_iterations import is_overflow_block
from flax.full_node.bundle_tools import detect_potential_template_generator
from flax.full_node.mempool_check_conditions import get_name_puzzle_conditions
from flax.types.blockchain_format.classgroup import ClassgroupElement
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.foliage import TransactionsInfo
from flax.types.blockchain_format.program import SerializedProgram
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.slots import InfusedChallengeChainSubSlot
from flax.types.blockchain_format.vdf import VDFInfo, VDFProof
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.condition_with_args import ConditionWithArgs
from flax.types.end_of_slot_bundle import EndOfSubSlotBundle
from flax.types.full_block import FullBlock
from flax.types.generator_types import BlockGenerator
from flax.types.spend_bundle import SpendBundle
from flax.types.unfinished_block import UnfinishedBlock
from flax.util.generator_tools import get_block_header
from flax.util.vdf_prover import get_vdf_info_and_proof
from flax.simulator.block_tools import create_block_tools_async
from flax.util.errors import Err
from flax.util.hash import std_hash
from flax.util.ints import uint8, uint64, uint32
from flax.util.merkle_set import MerkleSet
from flax.util.recursive_replace import recursive_replace
from tests.blockchain.blockchain_test_utils import (
_validate_and_add_block,
_validate_and_add_block_multi_error,
_validate_and_add_block_multi_result,
_validate_and_add_block_no_error,
)
from flax.simulator.wallet_tools import WalletTool
from tests.setup_nodes import test_constants
from tests.util.blockchain import create_blockchain
from tests.util.keyring import TempKeyring
from flax.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
log = logging.getLogger(__name__)
bad_element = ClassgroupElement.from_bytes(b"\x00")
class TestGenesisBlock:
@pytest.mark.asyncio
async def test_block_tools_proofs_400(self, default_400_blocks):
vdf, proof = get_vdf_info_and_proof(
test_constants, ClassgroupElement.get_default_element(), test_constants.GENESIS_CHALLENGE, uint64(231)
)
if proof.is_valid(test_constants, ClassgroupElement.get_default_element(), vdf) is False:
raise Exception("invalid proof")
@pytest.mark.asyncio
async def test_block_tools_proofs_1000(self, default_1000_blocks):
vdf, proof = get_vdf_info_and_proof(
test_constants, ClassgroupElement.get_default_element(), test_constants.GENESIS_CHALLENGE, uint64(231)
)
if proof.is_valid(test_constants, ClassgroupElement.get_default_element(), vdf) is False:
raise Exception("invalid proof")
@pytest.mark.asyncio
async def test_block_tools_proofs(self):
vdf, proof = get_vdf_info_and_proof(
test_constants, ClassgroupElement.get_default_element(), test_constants.GENESIS_CHALLENGE, uint64(231)
)
if proof.is_valid(test_constants, ClassgroupElement.get_default_element(), vdf) is False:
raise Exception("invalid proof")
@pytest.mark.asyncio
async def test_non_overflow_genesis(self, empty_blockchain, bt):
assert empty_blockchain.get_peak() is None
genesis = bt.get_consecutive_blocks(1, force_overflow=False)[0]
await _validate_and_add_block(empty_blockchain, genesis)
assert empty_blockchain.get_peak().height == 0
@pytest.mark.asyncio
async def test_overflow_genesis(self, empty_blockchain, bt):
genesis = bt.get_consecutive_blocks(1, force_overflow=True)[0]
await _validate_and_add_block(empty_blockchain, genesis)
@pytest.mark.asyncio
async def test_genesis_empty_slots(self, empty_blockchain, bt):
genesis = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=30)[0]
await _validate_and_add_block(empty_blockchain, genesis)
@pytest.mark.asyncio
async def test_overflow_genesis_empty_slots(self, empty_blockchain, bt):
genesis = bt.get_consecutive_blocks(1, force_overflow=True, skip_slots=3)[0]
await _validate_and_add_block(empty_blockchain, genesis)
@pytest.mark.asyncio
async def test_genesis_validate_1(self, empty_blockchain, bt):
genesis = bt.get_consecutive_blocks(1, force_overflow=False)[0]
bad_prev = bytes([1] * 32)
genesis = recursive_replace(genesis, "foliage.prev_block_hash", bad_prev)
await _validate_and_add_block(empty_blockchain, genesis, expected_error=Err.INVALID_PREV_BLOCK_HASH)
class TestBlockHeaderValidation:
@pytest.mark.asyncio
async def test_long_chain(self, empty_blockchain, default_1000_blocks):
blocks = default_1000_blocks
for block in blocks:
if (
len(block.finished_sub_slots) > 0
and block.finished_sub_slots[0].challenge_chain.subepoch_summary_hash is not None
):
# Sub/Epoch. Try using a bad ssi and difficulty to test 2m and 2n
new_finished_ss = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.new_sub_slot_iters",
uint64(10000000),
)
block_bad = recursive_replace(
block, "finished_sub_slots", [new_finished_ss] + block.finished_sub_slots[1:]
)
header_block_bad = get_block_header(block_bad, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad,
False,
block.finished_sub_slots[0].challenge_chain.new_difficulty,
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_NEW_SUB_SLOT_ITERS
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
new_finished_ss_2 = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.new_difficulty",
uint64(10000000),
)
block_bad_2 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_2] + block.finished_sub_slots[1:]
)
header_block_bad_2 = get_block_header(block_bad_2, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad_2,
False,
block.finished_sub_slots[0].challenge_chain.new_difficulty,
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_NEW_DIFFICULTY
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad_2, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
# 3c
new_finished_ss_3: EndOfSubSlotBundle = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.subepoch_summary_hash",
bytes([0] * 32),
)
new_finished_ss_3 = recursive_replace(
new_finished_ss_3,
"reward_chain.challenge_chain_sub_slot_hash",
new_finished_ss_3.challenge_chain.get_hash(),
)
log.warning(f"Number of slots: {len(block.finished_sub_slots)}")
block_bad_3 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_3] + block.finished_sub_slots[1:]
)
header_block_bad_3 = get_block_header(block_bad_3, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad_3,
False,
block.finished_sub_slots[0].challenge_chain.new_difficulty,
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_SUB_EPOCH_SUMMARY
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad_3, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
# 3d
new_finished_ss_4 = recursive_replace(
block.finished_sub_slots[0],
"challenge_chain.subepoch_summary_hash",
std_hash(b"123"),
)
new_finished_ss_4 = recursive_replace(
new_finished_ss_4,
"reward_chain.challenge_chain_sub_slot_hash",
new_finished_ss_4.challenge_chain.get_hash(),
)
block_bad_4 = recursive_replace(
block, "finished_sub_slots", [new_finished_ss_4] + block.finished_sub_slots[1:]
)
header_block_bad_4 = get_block_header(block_bad_4, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad_4,
False,
block.finished_sub_slots[0].challenge_chain.new_difficulty,
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_SUB_EPOCH_SUMMARY
# Also fails calling the outer methods, but potentially with a different error
await _validate_and_add_block(
empty_blockchain, block_bad_4, expected_result=ReceiveBlockResult.INVALID_BLOCK
)
await _validate_and_add_block(empty_blockchain, block)
log.info(
f"Added block {block.height} total iters {block.total_iters} "
f"new slot? {len(block.finished_sub_slots)}"
)
assert empty_blockchain.get_peak().height == len(blocks) - 1
@pytest.mark.asyncio
async def test_unfinished_blocks(self, empty_blockchain, bt):
blockchain = empty_blockchain
blocks = bt.get_consecutive_blocks(3)
for block in blocks[:-1]:
await _validate_and_add_block(empty_blockchain, block)
block = blocks[-1]
unf = UnfinishedBlock(
block.finished_sub_slots,
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
npc_result = None
if unf.transactions_generator is not None:
block_generator: BlockGenerator = await blockchain.get_block_generator(unf)
block_bytes = bytes(unf)
npc_result = await blockchain.run_generator(block_bytes, block_generator)
validate_res = await blockchain.validate_unfinished_block(unf, npc_result, False)
err = validate_res.error
assert err is None
await _validate_and_add_block(empty_blockchain, block)
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
block = blocks[-1]
unf = UnfinishedBlock(
block.finished_sub_slots,
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
npc_result = None
if unf.transactions_generator is not None:
block_generator: BlockGenerator = await blockchain.get_block_generator(unf)
block_bytes = bytes(unf)
npc_result = await blockchain.run_generator(block_bytes, block_generator)
validate_res = await blockchain.validate_unfinished_block(unf, npc_result, False)
assert validate_res.error is None
@pytest.mark.asyncio
async def test_empty_genesis(self, empty_blockchain, bt):
for block in bt.get_consecutive_blocks(2, skip_slots=3):
await _validate_and_add_block(empty_blockchain, block)
@pytest.mark.asyncio
async def test_empty_slots_non_genesis(self, empty_blockchain, bt):
blockchain = empty_blockchain
blocks = bt.get_consecutive_blocks(10)
for block in blocks:
await _validate_and_add_block(empty_blockchain, block)
blocks = bt.get_consecutive_blocks(10, skip_slots=2, block_list_input=blocks)
for block in blocks[10:]:
await _validate_and_add_block(empty_blockchain, block)
assert blockchain.get_peak().height == 19
@pytest.mark.asyncio
async def test_one_sb_per_slot(self, empty_blockchain, bt):
blockchain = empty_blockchain
num_blocks = 20
blocks = []
for i in range(num_blocks):
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
await _validate_and_add_block(empty_blockchain, blocks[-1])
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_all_overflow(self, empty_blockchain, bt):
blockchain = empty_blockchain
num_rounds = 5
blocks = []
num_blocks = 0
for i in range(1, num_rounds):
num_blocks += i
blocks = bt.get_consecutive_blocks(i, block_list_input=blocks, skip_slots=1, force_overflow=True)
for block in blocks[-i:]:
await _validate_and_add_block(empty_blockchain, block)
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_unf_block_overflow(self, empty_blockchain, bt):
blockchain = empty_blockchain
blocks = []
while True:
# This creates an overflow block, then a normal block, and then an overflow in the next sub-slot
# blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks)
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True)
await _validate_and_add_block(blockchain, blocks[-2])
sb_1 = blockchain.block_record(blocks[-2].header_hash)
sb_2_next_ss = blocks[-1].total_iters - blocks[-2].total_iters < sb_1.sub_slot_iters
# We might not get a normal block for sb_2, and we might not get them in the right slots
# So this while loop keeps trying
if sb_1.overflow and sb_2_next_ss:
block = blocks[-1]
unf = UnfinishedBlock(
[],
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
npc_result = None
if block.transactions_generator is not None:
block_generator: BlockGenerator = await blockchain.get_block_generator(unf)
block_bytes = bytes(unf)
npc_result = await blockchain.run_generator(block_bytes, block_generator)
validate_res = await blockchain.validate_unfinished_block(
unf, npc_result, skip_overflow_ss_validation=True
)
assert validate_res.error is None
return None
await _validate_and_add_block(blockchain, blocks[-1])
@pytest.mark.asyncio
async def test_one_sb_per_two_slots(self, empty_blockchain, bt):
blockchain = empty_blockchain
num_blocks = 20
blocks = []
for i in range(num_blocks): # Same thing, but 2 sub-slots per block
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2)
await _validate_and_add_block(blockchain, blocks[-1])
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_one_sb_per_five_slots(self, empty_blockchain, bt):
blockchain = empty_blockchain
num_blocks = 10
blocks = []
for i in range(num_blocks): # Same thing, but 5 sub-slots per block
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=5)
await _validate_and_add_block(blockchain, blocks[-1])
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_basic_chain_overflow(self, empty_blockchain, bt):
blocks = bt.get_consecutive_blocks(5, force_overflow=True)
for block in blocks:
await _validate_and_add_block(empty_blockchain, block)
assert empty_blockchain.get_peak().height == len(blocks) - 1
@pytest.mark.asyncio
async def test_one_sb_per_two_slots_force_overflow(self, empty_blockchain, bt):
blockchain = empty_blockchain
num_blocks = 10
blocks = []
for i in range(num_blocks):
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2, force_overflow=True)
await _validate_and_add_block(blockchain, blocks[-1])
assert blockchain.get_peak().height == num_blocks - 1
@pytest.mark.asyncio
async def test_invalid_prev(self, empty_blockchain, bt):
# 1
blocks = bt.get_consecutive_blocks(2, force_overflow=False)
await _validate_and_add_block(empty_blockchain, blocks[0])
block_1_bad = recursive_replace(blocks[-1], "foliage.prev_block_hash", bytes([0] * 32))
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_error=Err.INVALID_PREV_BLOCK_HASH)
@pytest.mark.asyncio
async def test_invalid_pospace(self, empty_blockchain, bt):
# 2
blocks = bt.get_consecutive_blocks(2, force_overflow=False)
await _validate_and_add_block(empty_blockchain, blocks[0])
block_1_bad = recursive_replace(blocks[-1], "reward_chain_block.proof_of_space.proof", bytes([0] * 32))
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_error=Err.INVALID_POSPACE)
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_genesis(self, empty_blockchain, bt):
# 2a
blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=1)
new_finished_ss = recursive_replace(
blocks[0].finished_sub_slots[0],
"challenge_chain.challenge_chain_end_of_slot_vdf.challenge",
bytes([2] * 32),
)
block_0_bad = recursive_replace(
blocks[0], "finished_sub_slots", [new_finished_ss] + blocks[0].finished_sub_slots[1:]
)
header_block_bad = get_block_header(block_0_bad, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad,
False,
empty_blockchain.constants.DIFFICULTY_STARTING,
empty_blockchain.constants.SUB_SLOT_ITERS_STARTING,
)
assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
await _validate_and_add_block(empty_blockchain, block_0_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_non_genesis(self, empty_blockchain, bt):
# 2b
blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=0)
blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=1, block_list_input=blocks)
new_finished_ss = recursive_replace(
blocks[1].finished_sub_slots[0],
"challenge_chain.challenge_chain_end_of_slot_vdf.challenge",
bytes([2] * 32),
)
block_1_bad = recursive_replace(
blocks[1], "finished_sub_slots", [new_finished_ss] + blocks[1].finished_sub_slots[1:]
)
await _validate_and_add_block(empty_blockchain, blocks[0])
header_block_bad = get_block_header(block_1_bad, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad,
False,
blocks[1].finished_sub_slots[0].challenge_chain.new_difficulty,
blocks[1].finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_invalid_sub_slot_challenge_hash_empty_ss(self, empty_blockchain, bt):
# 2c
blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=0)
blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=2, block_list_input=blocks)
new_finished_ss = recursive_replace(
blocks[1].finished_sub_slots[-1],
"challenge_chain.challenge_chain_end_of_slot_vdf.challenge",
bytes([2] * 32),
)
block_1_bad = recursive_replace(
blocks[1], "finished_sub_slots", blocks[1].finished_sub_slots[:-1] + [new_finished_ss]
)
await _validate_and_add_block(empty_blockchain, blocks[0])
header_block_bad = get_block_header(block_1_bad, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad,
False,
blocks[1].finished_sub_slots[0].challenge_chain.new_difficulty,
blocks[1].finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH
await _validate_and_add_block(empty_blockchain, block_1_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
@pytest.mark.asyncio
async def test_genesis_no_icc(self, empty_blockchain, bt):
# 2d
blocks = bt.get_consecutive_blocks(1, force_overflow=False, skip_slots=1)
new_finished_ss = recursive_replace(
blocks[0].finished_sub_slots[0],
"infused_challenge_chain",
InfusedChallengeChainSubSlot(
VDFInfo(
bytes([0] * 32),
uint64(1200),
ClassgroupElement.get_default_element(),
)
),
)
block_0_bad = recursive_replace(
blocks[0], "finished_sub_slots", [new_finished_ss] + blocks[0].finished_sub_slots[1:]
)
await _validate_and_add_block(empty_blockchain, block_0_bad, expected_error=Err.SHOULD_NOT_HAVE_ICC)
async def do_test_invalid_icc_sub_slot_vdf(self, keychain, db_version):
bt_high_iters = await create_block_tools_async(
constants=test_constants.replace(SUB_SLOT_ITERS_STARTING=(2**12), DIFFICULTY_STARTING=(2**14)),
keychain=keychain,
)
bc1, db_wrapper, db_path = await create_blockchain(bt_high_iters.constants, db_version)
blocks = bt_high_iters.get_consecutive_blocks(10)
for block in blocks:
if len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is not None:
# Bad iters
new_finished_ss = recursive_replace(
block.finished_sub_slots[-1],
"infused_challenge_chain",
InfusedChallengeChainSubSlot(
replace(
block.finished_sub_slots[
-1
].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
number_of_iterations=10000000,
)
),
)
block_bad = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
)
await _validate_and_add_block(bc1, block_bad, expected_error=Err.INVALID_ICC_EOS_VDF)
# Bad output
new_finished_ss_2 = recursive_replace(
block.finished_sub_slots[-1],
"infused_challenge_chain",
InfusedChallengeChainSubSlot(
replace(
block.finished_sub_slots[
-1
].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
output=ClassgroupElement.get_default_element(),
)
),
)
log.warning(f"Proof: {block.finished_sub_slots[-1].proofs}")
block_bad_2 = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_2]
)
await _validate_and_add_block(bc1, block_bad_2, expected_error=Err.INVALID_ICC_EOS_VDF)
# Bad challenge hash
new_finished_ss_3 = recursive_replace(
block.finished_sub_slots[-1],
"infused_challenge_chain",
InfusedChallengeChainSubSlot(
replace(
block.finished_sub_slots[
-1
].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
challenge=bytes([0] * 32),
)
),
)
block_bad_3 = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_3]
)
await _validate_and_add_block(bc1, block_bad_3, expected_error=Err.INVALID_ICC_EOS_VDF)
# Bad proof
new_finished_ss_5 = recursive_replace(
block.finished_sub_slots[-1],
"proofs.infused_challenge_chain_slot_proof",
VDFProof(uint8(0), b"1239819023890", False),
)
block_bad_5 = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_5]
)
await _validate_and_add_block(bc1, block_bad_5, expected_error=Err.INVALID_ICC_EOS_VDF)
await _validate_and_add_block(bc1, block)
await db_wrapper.close()
bc1.shut_down()
db_path.unlink()
@pytest.mark.asyncio
async def test_invalid_icc_sub_slot_vdf(self, db_version):
with TempKeyring() as keychain:
await self.do_test_invalid_icc_sub_slot_vdf(keychain, db_version)
@pytest.mark.asyncio
async def test_invalid_icc_into_cc(self, empty_blockchain, bt):
blockchain = empty_blockchain
blocks = bt.get_consecutive_blocks(1)
await _validate_and_add_block(blockchain, blocks[0])
case_1, case_2 = False, False
while not case_1 or not case_2:
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
block = blocks[-1]
if len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is not None:
if block.finished_sub_slots[-1].reward_chain.deficit == test_constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK:
# 2g
case_1 = True
new_finished_ss = recursive_replace(
block.finished_sub_slots[-1],
"challenge_chain",
replace(
block.finished_sub_slots[-1].challenge_chain,
infused_challenge_chain_sub_slot_hash=bytes([1] * 32),
),
)
else:
# 2h
case_2 = True
new_finished_ss = recursive_replace(
block.finished_sub_slots[-1],
"challenge_chain",
replace(
block.finished_sub_slots[-1].challenge_chain,
infused_challenge_chain_sub_slot_hash=block.finished_sub_slots[
-1
].infused_challenge_chain.get_hash(),
),
)
block_bad = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss]
)
header_block_bad = get_block_header(block_bad, [], [])
_, error = validate_finished_header_block(
empty_blockchain.constants,
empty_blockchain,
header_block_bad,
False,
block.finished_sub_slots[0].challenge_chain.new_difficulty,
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters,
)
assert error.code == Err.INVALID_ICC_HASH_CC
await _validate_and_add_block(blockchain, block_bad, expected_result=ReceiveBlockResult.INVALID_BLOCK)
# 2i
new_finished_ss_bad_rc = recursive_replace(
block.finished_sub_slots[-1],
"reward_chain",
replace(block.finished_sub_slots[-1].reward_chain, infused_challenge_chain_sub_slot_hash=None),
)
block_bad = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_rc]
)
await _validate_and_add_block(blockchain, block_bad, expected_error=Err.INVALID_ICC_HASH_RC)
elif len(block.finished_sub_slots) > 0 and block.finished_sub_slots[-1].infused_challenge_chain is None:
# 2j
new_finished_ss_bad_cc = recursive_replace(
block.finished_sub_slots[-1],
"challenge_chain",
replace(
block.finished_sub_slots[-1].challenge_chain,
infused_challenge_chain_sub_slot_hash=bytes([1] * 32),
),
)
block_bad = recursive_replace(
block, "finished_sub_slots", block.finished_sub_slots[:-1] + [new_finished_ss_bad_cc]
)
await _validate_and_add_block(blockchain, block_bad, expected_error=Err.INVALID_ICC_HASH_CC)
# 2k
new_finished_ss_bad_rc = recursive_replace(
block.finished_sub_slots[-1],
"reward_chain",
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/blockchain/test_blockchain_transactions.py | tests/blockchain/test_blockchain_transactions.py | import logging
import pytest
from clvm.casts import int_to_bytes
from flax.protocols import full_node_protocol, wallet_protocol
from flax.types.announcement import Announcement
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.condition_with_args import ConditionWithArgs
from flax.types.spend_bundle import SpendBundle
from flax.util.errors import ConsensusError, Err
from flax.util.ints import uint64
from tests.blockchain.blockchain_test_utils import _validate_and_add_block
from tests.setup_nodes import test_constants
from tests.util.generator_tools_testing import run_and_get_removals_and_additions
from flax.simulator.wallet_tools import WalletTool
BURN_PUZZLE_HASH = b"0" * 32
WALLET_A = WalletTool(test_constants)
WALLET_A_PUZZLE_HASHES = [WALLET_A.get_new_puzzlehash() for _ in range(5)]
log = logging.getLogger(__name__)
class TestBlockchainTransactions:
@pytest.mark.asyncio
async def test_basic_blockchain_tx(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block), None)
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
assert spend_bundle is not None
tx: wallet_protocol.SendTransaction = wallet_protocol.SendTransaction(spend_bundle)
await full_node_api_1.send_transaction(tx)
sb = full_node_1.mempool_manager.get_spendbundle(spend_bundle.name())
assert sb is spend_bundle
last_block = blocks[-1]
next_spendbundle, additions, removals = await full_node_1.mempool_manager.create_bundle_from_mempool(
last_block.header_hash
)
assert next_spendbundle is not None
new_blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=next_spendbundle,
guarantee_transaction_block=True,
)
next_block = new_blocks[-1]
await full_node_1.respond_block(full_node_protocol.RespondBlock(next_block))
assert next_block.header_hash == full_node_1.blockchain.get_peak().header_hash
added_coins = next_spendbundle.additions()
# Two coins are added, main spend and change
assert len(added_coins) == 2
for coin in added_coins:
unspent = await full_node_1.coin_store.get_coin_record(coin.name())
assert unspent is not None
assert not unspent.spent
assert not unspent.coinbase
@pytest.mark.asyncio
async def test_validate_blockchain_with_double_spend(self, two_nodes):
num_blocks = 5
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
spend_bundle_double = wallet_a.generate_signed_transaction(1001, receiver_puzzlehash, spend_coin)
block_spendbundle = SpendBundle.aggregate([spend_bundle, spend_bundle_double])
new_blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block_spendbundle,
guarantee_transaction_block=True,
)
next_block = new_blocks[-1]
await _validate_and_add_block(full_node_1.blockchain, next_block, expected_error=Err.DOUBLE_SPEND)
@pytest.mark.asyncio
async def test_validate_blockchain_duplicate_output(self, two_nodes):
num_blocks = 3
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin, additional_outputs=[(receiver_puzzlehash, 1000)]
)
new_blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
next_block = new_blocks[-1]
await _validate_and_add_block(full_node_1.blockchain, next_block, expected_error=Err.DUPLICATE_OUTPUT)
@pytest.mark.asyncio
async def test_validate_blockchain_with_reorg_double_spend(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
blocks_spend = bt.get_consecutive_blocks(
1,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
# Move chain to height 10, with a spend at height 10
for block in blocks_spend:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Reorg at height 5, add up to and including height 12
new_blocks = bt.get_consecutive_blocks(
7,
blocks[:6],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
seed=b"another seed",
)
for block in new_blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spend the same coin in the new reorg chain at height 13
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
await _validate_and_add_block(full_node_api_1.full_node.blockchain, new_blocks[-1])
# But can't spend it twice
new_blocks_double = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
await _validate_and_add_block(
full_node_api_1.full_node.blockchain, new_blocks_double[-1], expected_error=Err.DOUBLE_SPEND
)
# Now test Reorg at block 5, same spend at block height 12
new_blocks_reorg = bt.get_consecutive_blocks(
1,
new_blocks[:12],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
seed=b"spend at 12 is ok",
)
for block in new_blocks_reorg:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spend at height 13 is also OK (same height)
new_blocks_reorg = bt.get_consecutive_blocks(
1,
new_blocks[:13],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
seed=b"spend at 13 is ok",
)
for block in new_blocks_reorg:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spend at height 14 is not OK (already spend)
new_blocks_reorg = bt.get_consecutive_blocks(
1,
new_blocks[:14],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
seed=b"spend at 14 is double spend",
)
with pytest.raises(ConsensusError):
for block in new_blocks_reorg:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
@pytest.mark.asyncio
async def test_validate_blockchain_spend_reorg_coin(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
receiver_2_puzzlehash = WALLET_A_PUZZLE_HASHES[2]
receiver_3_puzzlehash = WALLET_A_PUZZLE_HASHES[3]
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
assert spend_coin
spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1,
blocks[:5],
seed=b"spend_reorg_coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
coin_2 = None
for coin in run_and_get_removals_and_additions(
new_blocks[-1],
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
)[1]:
if coin.puzzle_hash == receiver_1_puzzlehash:
coin_2 = coin
break
assert coin_2 is not None
spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_2_puzzlehash, coin_2)
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks[:6],
seed=b"spend_reorg_coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
coin_3 = None
for coin in run_and_get_removals_and_additions(
new_blocks[-1],
test_constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=test_constants.COST_PER_BYTE,
)[1]:
if coin.puzzle_hash == receiver_2_puzzlehash:
coin_3 = coin
break
assert coin_3 is not None
spend_bundle = wallet_a.generate_signed_transaction(uint64(1000), receiver_3_puzzlehash, coin_3)
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks[:7],
seed=b"spend_reorg_coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
@pytest.mark.asyncio
async def test_validate_blockchain_spend_reorg_cb_coin(self, two_nodes):
num_blocks = 15
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
blocks = bt.get_consecutive_blocks(num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spends a coinbase created in reorg
new_blocks = bt.get_consecutive_blocks(
5,
blocks[:6],
seed=b"reorg cb coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
for block in new_blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = new_blocks[-1]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
seed=b"reorg cb coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
@pytest.mark.asyncio
async def test_validate_blockchain_spend_reorg_since_genesis(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[-1]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1, blocks, seed=b"", farmer_reward_puzzle_hash=coinbase_puzzlehash, transaction_data=spend_bundle
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
# Spends a coin in a genesis reorg, that was already spent
new_blocks = bt.get_consecutive_blocks(
12,
[],
seed=b"reorg since genesis",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
for block in new_blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
seed=b"reorg since genesis",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
@pytest.mark.asyncio
async def test_assert_my_coin_id(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
spend_block = blocks[2]
bad_block = blocks[3]
spend_coin = None
bad_spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
for coin in list(bad_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
bad_spend_coin = coin
valid_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [spend_coin.name()])
valid_dic = {valid_cvp.opcode: [valid_cvp]}
bad_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [bad_spend_coin.name()])
bad_dic = {bad_cvp.opcode: [bad_cvp]}
bad_spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin, bad_dic)
valid_spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin, valid_dic)
assert bad_spend_bundle is not None
assert valid_spend_bundle is not None
# Invalid block bundle
# Create another block that includes our transaction
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=bad_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block
await _validate_and_add_block(
full_node_1.blockchain, invalid_new_blocks[-1], expected_error=Err.ASSERT_MY_COIN_ID_FAILED
)
# Valid block bundle
# Create another block that includes our transaction
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=valid_spend_bundle,
guarantee_transaction_block=True,
)
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
async def test_assert_coin_announcement_consumed(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
block2 = blocks[3]
spend_coin_block_1 = None
spend_coin_block_2 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
for coin in list(block2.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_2 = coin
# This condition requires block2 coinbase to be spent
block1_cvp = ConditionWithArgs(
ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT,
[Announcement(spend_coin_block_2.name(), b"test").name()],
)
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# This condition requires block1 coinbase to be spent
block2_cvp = ConditionWithArgs(
ConditionOpcode.CREATE_COIN_ANNOUNCEMENT,
[b"test"],
)
block2_dic = {block2_cvp.opcode: [block2_cvp]}
block2_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_2, block2_dic
)
# Invalid block bundle
assert block1_spend_bundle is not None
# Create another block that includes our transaction
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block
await _validate_and_add_block(
full_node_1.blockchain, invalid_new_blocks[-1], expected_error=Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
)
# bundle_together contains both transactions
bundle_together = SpendBundle.aggregate([block1_spend_bundle, block2_spend_bundle])
# Create another block that includes our transaction
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=bundle_together,
guarantee_transaction_block=True,
)
# Try to validate newly created block
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
async def test_assert_puzzle_announcement_consumed(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
block2 = blocks[3]
spend_coin_block_1 = None
spend_coin_block_2 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
for coin in list(block2.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_2 = coin
# This condition requires block2 coinbase to be spent
block1_cvp = ConditionWithArgs(
ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT,
[Announcement(spend_coin_block_2.puzzle_hash, b"test").name()],
)
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# This condition requires block1 coinbase to be spent
block2_cvp = ConditionWithArgs(
ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT,
[b"test"],
)
block2_dic = {block2_cvp.opcode: [block2_cvp]}
block2_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_2, block2_dic
)
# Invalid block bundle
assert block1_spend_bundle is not None
# Create another block that includes our transaction
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block
await _validate_and_add_block(
full_node_1.blockchain, invalid_new_blocks[-1], expected_error=Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
)
# bundle_together contains both transactions
bundle_together = SpendBundle.aggregate([block1_spend_bundle, block2_spend_bundle])
# Create another block that includes our transaction
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=bundle_together,
guarantee_transaction_block=True,
)
# Try to validate newly created block
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
async def test_assert_height_absolute(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires block1 coinbase to be spent after index 10
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(10)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# program that will be sent too early
assert block1_spend_bundle is not None
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block at index 10
await _validate_and_add_block(
full_node_1.blockchain, invalid_new_blocks[-1], expected_error=Err.ASSERT_HEIGHT_ABSOLUTE_FAILED
)
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
# At index 11, it can be spent
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
async def test_assert_height_relative(self, two_nodes):
num_blocks = 11
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires block1 coinbase to be spent after index 11
# This condition requires block1 coinbase to be spent more than 10 block after it was farmed
# block index has to be greater than (2 + 9 = 11)
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(9)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# program that will be sent too early
assert block1_spend_bundle is not None
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block at index 11
await _validate_and_add_block(
full_node_1.blockchain, invalid_new_blocks[-1], expected_error=Err.ASSERT_HEIGHT_RELATIVE_FAILED
)
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
# At index 12, it can be spent
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
await _validate_and_add_block(full_node_1.blockchain, new_blocks[-1])
@pytest.mark.asyncio
async def test_assert_seconds_relative(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
full_node_api_1, full_node_api_2, server_1, server_2, bt = two_nodes
full_node_1 = full_node_api_1.full_node
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires block1 coinbase to be spent 300 seconds after coin creation
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_RELATIVE, [int_to_bytes(300)])
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/blockchain/config.py | tests/blockchain/config.py | from __future__ import annotations
# NOTE(review): these look like per-directory settings consumed by the
# project's test-runner/CI generator (parallelism, per-job timeout, whether to
# check out block/plot fixtures) — confirm against the consumer.
parallel = True
job_timeout = 60
checkout_blocks_and_plots = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/blockchain/blockchain_test_utils.py | tests/blockchain/blockchain_test_utils.py | from typing import Optional, List
from flax.consensus.blockchain import Blockchain, ReceiveBlockResult
from flax.consensus.multiprocess_validation import PreValidationResult
from flax.types.full_block import FullBlock
from flax.util.errors import Err
from flax.util.ints import uint64, uint32
async def check_block_store_invariant(bc: Blockchain):
    """Assert the block store's main-chain invariant.

    At most one block per height may be flagged ``in_main_chain``, and the
    flagged heights must form a contiguous range from 0 up to the peak.
    """
    wrapper = bc.block_store.db_wrapper
    # The v1 schema does not carry the in_main_chain flag; nothing to verify.
    if wrapper.db_version == 1:
        return
    main_chain_heights = set()
    peak_height = -1
    async with wrapper.writer_maybe_transaction() as conn:
        async with conn.execute("SELECT height, in_main_chain FROM full_blocks") as cursor:
            for height, in_main_chain in await cursor.fetchall():
                if not in_main_chain:
                    continue
                # Exactly one in-main-chain block is allowed per height; a
                # duplicate would be an invariant violation.
                assert height not in main_chain_heights
                main_chain_heights.add(height)
                peak_height = max(peak_height, height)
    # Every height from 0 through the peak must be represented.
    assert len(main_chain_heights) == peak_height + 1
async def _validate_and_add_block(
    blockchain: Blockchain,
    block: FullBlock,
    expected_result: Optional[ReceiveBlockResult] = None,
    expected_error: Optional[Err] = None,
    skip_prevalidation: bool = False,
    fork_point_with_peak: Optional[uint32] = None,
) -> None:
    """Pre-validate ``block``, add it to ``blockchain``, and enforce the outcome.

    - With no expectations set, the block must validate and become the new peak.
    - If ``expected_result`` is given, ``receive_block`` must return exactly it.
    - If ``expected_error`` is given, validation must fail with exactly that
      error, and ``receive_block`` must return ``ReceiveBlockResult.INVALID_BLOCK``.
    - ``expected_result == INVALID_BLOCK`` with ``expected_error is None``
      accepts any validation error.
    The block-store invariant is re-checked before and after every outcome.
    """
    await check_block_store_invariant(blockchain)
    if skip_prevalidation:
        # Fabricate a successful pre-validation result so receive_block runs
        # without the multiprocessing pre-validation step.
        results = PreValidationResult(None, uint64(1), None, False)
    else:
        # Do not change this, validate_signatures must be False
        pre_validation_results: List[PreValidationResult] = await blockchain.pre_validate_blocks_multiprocessing(
            [block], {}, validate_signatures=False
        )
        assert pre_validation_results is not None
        results = pre_validation_results[0]
    if results.error is not None:
        # Pre-validation failed; check it failed the way the caller expected.
        if expected_result == ReceiveBlockResult.INVALID_BLOCK and expected_error is None:
            # We expected an error but didn't specify which one
            await check_block_store_invariant(blockchain)
            return None
        if expected_error is None:
            # We did not expect an error
            raise AssertionError(Err(results.error))
        elif Err(results.error) != expected_error:
            # We expected an error but a different one
            raise AssertionError(f"Expected {expected_error} but got {Err(results.error)}")
        await check_block_store_invariant(blockchain)
        return None
    (
        result,
        err,
        _,
    ) = await blockchain.receive_block(block, results, fork_point_with_peak=fork_point_with_peak)
    await check_block_store_invariant(blockchain)
    if expected_error is None and expected_result != ReceiveBlockResult.INVALID_BLOCK:
        # No error was expected in this branch; fail if receive_block reported one.
        if err is not None:
            # Got an error
            raise AssertionError(err)
    else:
        # Here we will enforce checking of the exact error
        if err != expected_error:
            # Did not get the right error, or did not get an error
            raise AssertionError(f"Expected {expected_error} but got {err}")
    if expected_result is not None and expected_result != result:
        raise AssertionError(f"Expected {expected_result} but got {result}")
    elif expected_result is None:
        # If we expected an error assume that expected_result = INVALID_BLOCK
        if expected_error is not None and result != ReceiveBlockResult.INVALID_BLOCK:
            raise AssertionError(f"Block should be invalid, but received: {result}")
        # Otherwise, assume that expected_result = NEW_PEAK
        if expected_error is None and result != ReceiveBlockResult.NEW_PEAK:
            raise AssertionError(f"Block was not added: {result}")
async def _validate_and_add_block_multi_error(
    blockchain: Blockchain, block: FullBlock, expected_errors: List[Err], skip_prevalidation: bool = False
) -> None:
    """Add ``block`` and require validation to fail with one of ``expected_errors``."""
    failed = False
    try:
        await _validate_and_add_block(blockchain, block, skip_prevalidation=skip_prevalidation)
    except Exception as caught:
        failed = True
        # Only the AssertionError raised by _validate_and_add_block is
        # acceptable, and it must carry one of the anticipated errors.
        assert isinstance(caught, AssertionError)
        assert caught.args[0] in expected_errors
    if not failed:
        raise AssertionError("Did not return an error")
async def _validate_and_add_block_multi_result(
    blockchain: Blockchain,
    block: FullBlock,
    expected_result: List[ReceiveBlockResult],
    skip_prevalidation: Optional[bool] = None,
) -> None:
    """Add ``block`` and require receive_block to return one of ``expected_result``.

    A clean success passes; a "Block was not added: <result>" failure passes
    only when <result> is one of the listed results.
    """
    try:
        if skip_prevalidation is not None:
            await _validate_and_add_block(blockchain, block, skip_prevalidation=skip_prevalidation)
        else:
            await _validate_and_add_block(blockchain, block)
    except Exception as e:
        assert isinstance(e, AssertionError)
        # e.args[0] may be an Err enum (see raise AssertionError(err) in
        # _validate_and_add_block), on which the `in` containment test would
        # raise TypeError; coerce to str so such failures report cleanly.
        message = str(e.args[0])
        assert "Block was not added" in message
        expected_list: List[str] = [f"Block was not added: {res}" for res in expected_result]
        if message not in expected_list:
            raise AssertionError(f"{message.split('Block was not added: ')[1]} not in {expected_result}")
async def _validate_and_add_block_no_error(
    blockchain: Blockchain, block: FullBlock, skip_prevalidation: Optional[bool] = None
) -> None:
    """Add ``block`` and ensure no validation error occurs.

    The block need not extend the peak: NEW_PEAK, ADDED_AS_ORPHAN and
    ALREADY_HAVE_BLOCK are all acceptable outcomes.
    """
    acceptable_results = [
        ReceiveBlockResult.ALREADY_HAVE_BLOCK,
        ReceiveBlockResult.NEW_PEAK,
        ReceiveBlockResult.ADDED_AS_ORPHAN,
    ]
    await _validate_and_add_block_multi_result(
        blockchain, block, expected_result=acceptable_results, skip_prevalidation=skip_prevalidation
    )
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/blockchain/__init__.py | tests/blockchain/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_full_block_utils.py | tests/util/test_full_block_utils.py | from __future__ import annotations
import random
from typing import Generator, Iterator, List, Optional
import pytest
from blspy import G1Element, G2Element
from benchmarks.utils import rand_bytes, rand_g1, rand_g2, rand_hash, rand_vdf, rand_vdf_proof, rewards
from flax.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo
from flax.types.blockchain_format.pool_target import PoolTarget
from flax.types.blockchain_format.program import SerializedProgram
from flax.types.blockchain_format.proof_of_space import ProofOfSpace
from flax.types.blockchain_format.reward_chain_block import RewardChainBlock
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.blockchain_format.slots import (
ChallengeChainSubSlot,
InfusedChallengeChainSubSlot,
RewardChainSubSlot,
SubSlotProofs,
)
from flax.types.blockchain_format.vdf import VDFInfo, VDFProof
from flax.types.end_of_slot_bundle import EndOfSubSlotBundle
from flax.types.full_block import FullBlock
from flax.types.header_block import HeaderBlock
from flax.util.full_block_utils import block_info_from_block, generator_from_block, header_block_from_block
from flax.util.generator_tools import get_block_header
from flax.util.ints import uint8, uint32, uint64, uint128
# Pools of pre-generated random test values; the sampler helpers below pick
# from these instead of generating fresh (expensive) BLS/hash/VDF objects on
# every call.
test_g2s: List[G2Element] = [rand_g2() for _ in range(10)]
test_g1s: List[G1Element] = [rand_g1() for _ in range(10)]
test_hashes: List[bytes32] = [rand_hash() for _ in range(100)]
test_vdfs: List[VDFInfo] = [rand_vdf() for _ in range(100)]
test_vdf_proofs: List[VDFProof] = [rand_vdf_proof() for _ in range(100)]
def g2() -> G2Element:
return random.sample(test_g2s, 1)[0]
def g1() -> G1Element:
return random.sample(test_g1s, 1)[0]
def hsh() -> bytes32:
return random.sample(test_hashes, 1)[0]
def vdf() -> VDFInfo:
return random.sample(test_vdfs, 1)[0]
def vdf_proof() -> VDFProof:
return random.sample(test_vdf_proofs, 1)[0]
def get_proof_of_space() -> Generator[ProofOfSpace, None, None]:
for pool_pk in [g1(), None]:
for plot_hash in [hsh(), None]:
yield ProofOfSpace(
hsh(), # challenge
pool_pk,
plot_hash,
g1(), # plot_public_key
uint8(32),
rand_bytes(8 * 32),
)
def get_reward_chain_block(height: uint32) -> Generator[RewardChainBlock, None, None]:
for has_transactions in [True, False]:
for challenge_chain_sp_vdf in [vdf(), None]:
for reward_chain_sp_vdf in [vdf(), None]:
for infused_challenge_chain_ip_vdf in [vdf(), None]:
for proof_of_space in get_proof_of_space():
weight = uint128(random.randint(0, 1000000000))
iters = uint128(123456)
sp_index = uint8(0)
yield RewardChainBlock(
weight,
uint32(height),
iters,
sp_index,
hsh(), # pos_ss_cc_challenge_hash
proof_of_space,
challenge_chain_sp_vdf,
g2(), # challenge_chain_sp_signature
vdf(), # challenge_chain_ip_vdf
reward_chain_sp_vdf,
g2(), # reward_chain_sp_signature
vdf(), # reward_chain_ip_vdf
infused_challenge_chain_ip_vdf,
has_transactions,
)
def get_foliage_block_data() -> Generator[FoliageBlockData, None, None]:
for pool_signature in [g2(), None]:
pool_target = PoolTarget(
hsh(), # puzzle_hash
uint32(0), # max_height
)
yield FoliageBlockData(
hsh(), # unfinished_reward_block_hash
pool_target,
pool_signature, # pool_signature
hsh(), # farmer_reward_puzzle_hash
hsh(), # extension_data
)
def get_foliage() -> Generator[Foliage, None, None]:
for foliage_block_data in get_foliage_block_data():
for foliage_transaction_block_hash in [hsh(), None]:
for foliage_transaction_block_signature in [g2(), None]:
yield Foliage(
hsh(), # prev_block_hash
hsh(), # reward_block_hash
foliage_block_data,
g2(), # foliage_block_data_signature
foliage_transaction_block_hash,
foliage_transaction_block_signature,
)
def get_foliage_transaction_block() -> Generator[Optional[FoliageTransactionBlock], None, None]:
yield None
timestamp = uint64(1631794488)
yield FoliageTransactionBlock(
hsh(), # prev_transaction_block
timestamp,
hsh(), # filter_hash
hsh(), # additions_root
hsh(), # removals_root
hsh(), # transactions_info_hash
)
def get_transactions_info(height: uint32, foliage_transaction_block: Optional[FoliageTransactionBlock]):
if not foliage_transaction_block:
yield None
else:
farmer_coin, pool_coin = rewards(uint32(height))
reward_claims_incorporated = [farmer_coin, pool_coin]
fees = uint64(random.randint(0, 150000))
yield TransactionsInfo(
hsh(), # generator_root
hsh(), # generator_refs_root
g2(), # aggregated_signature
fees,
uint64(random.randint(0, 12000000000)), # cost
reward_claims_incorporated,
)
def get_challenge_chain_sub_slot() -> Generator[ChallengeChainSubSlot, None, None]:
for infused_chain_sub_slot_hash in [hsh(), None]:
for sub_epoch_summary_hash in [hsh(), None]:
for new_sub_slot_iters in [uint64(random.randint(0, 4000000000)), None]:
for new_difficulty in [uint64(random.randint(1, 30)), None]:
yield ChallengeChainSubSlot(
vdf(), # challenge_chain_end_of_slot_vdf
infused_chain_sub_slot_hash,
sub_epoch_summary_hash,
new_sub_slot_iters,
new_difficulty,
)
def get_reward_chain_sub_slot() -> Generator[RewardChainSubSlot, None, None]:
for infused_challenge_chain_sub_slot_hash in [hsh(), None]:
yield RewardChainSubSlot(
vdf(), # end_of_slot_vdf
hsh(), # challenge_chain_sub_slot_hash
infused_challenge_chain_sub_slot_hash,
uint8(random.randint(0, 255)), # deficit
)
def get_sub_slot_proofs() -> Generator[SubSlotProofs, None, None]:
for infused_challenge_chain_slot_proof in [vdf_proof(), None]:
yield SubSlotProofs(
vdf_proof(), # challenge_chain_slot_proof
infused_challenge_chain_slot_proof,
vdf_proof(), # reward_chain_slot_proof
)
def get_end_of_sub_slot() -> Generator[EndOfSubSlotBundle, None, None]:
for challenge_chain in get_challenge_chain_sub_slot():
for infused_challenge_chain in [InfusedChallengeChainSubSlot(vdf()), None]:
for reward_chain in get_reward_chain_sub_slot():
for proofs in get_sub_slot_proofs():
yield EndOfSubSlotBundle(
challenge_chain,
infused_challenge_chain,
reward_chain,
proofs,
)
def get_finished_sub_slots() -> Generator[List[EndOfSubSlotBundle], None, None]:
yield []
yield [s for s in get_end_of_sub_slot()]
def get_ref_list() -> Generator[List[uint32], None, None]:
yield []
yield [uint32(1), uint32(2), uint32(3), uint32(4)]
yield [uint32(256)]
yield [uint32(0xFFFFFFFF)]
def get_full_blocks() -> Iterator[FullBlock]:
random.seed(123456789)
generator = SerializedProgram.from_bytes(bytes.fromhex("ff01820539"))
for foliage in get_foliage():
for foliage_transaction_block in get_foliage_transaction_block():
height = uint32(random.randint(0, 1000000))
for reward_chain_block in get_reward_chain_block(height):
for transactions_info in get_transactions_info(height, foliage_transaction_block):
for challenge_chain_sp_proof in [vdf_proof(), None]:
for reward_chain_sp_proof in [vdf_proof(), None]:
for infused_challenge_chain_ip_proof in [vdf_proof(), None]:
for finished_sub_slots in get_finished_sub_slots():
for refs_list in get_ref_list():
yield FullBlock(
finished_sub_slots,
reward_chain_block,
challenge_chain_sp_proof,
vdf_proof(), # challenge_chain_ip_proof
reward_chain_sp_proof,
vdf_proof(), # reward_chain_ip_proof
infused_challenge_chain_ip_proof,
foliage,
foliage_transaction_block,
transactions_info,
generator, # transactions_generator
refs_list, # transactions_generator_ref_list
)
@pytest.mark.asyncio
# @pytest.mark.skip("This test is expensive and has already convinced us the parser works")
async def test_parser():
# loop over every combination of Optionals being set and not set
# along with random values for the FullBlock fields. Ensure
# generator_from_block() successfully parses out the generator object
# correctly
for block in get_full_blocks():
block_bytes = bytes(block)
gen = generator_from_block(block_bytes)
assert gen == block.transactions_generator
bi = block_info_from_block(block_bytes)
assert block.transactions_generator == bi.transactions_generator
assert block.prev_header_hash == bi.prev_header_hash
assert block.transactions_generator_ref_list == bi.transactions_generator_ref_list
# this doubles the run-time of this test, with questionable utility
# assert gen == FullBlock.from_bytes(block_bytes).transactions_generator
@pytest.mark.asyncio
@pytest.mark.skip("This test is expensive and has already convinced us the parser works")
async def test_header_block():
for block in get_full_blocks():
hb: HeaderBlock = get_block_header(block, [], [])
hb_bytes = header_block_from_block(memoryview(bytes(block)))
assert HeaderBlock.from_bytes(hb_bytes) == hb
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/blockchain.py | tests/util/blockchain.py | import os
import pickle
from pathlib import Path
from typing import List, Optional
import tempfile
from flax.consensus.blockchain import Blockchain
from flax.consensus.constants import ConsensusConstants
from flax.full_node.block_store import BlockStore
from flax.full_node.coin_store import CoinStore
from flax.types.full_block import FullBlock
from flax.util.db_wrapper import DBWrapper2
from flax.util.default_root import DEFAULT_ROOT_PATH
from flax.simulator.block_tools import BlockTools
async def create_blockchain(constants: ConsensusConstants, db_version: int):
db_path = Path(tempfile.NamedTemporaryFile().name)
if db_path.exists():
db_path.unlink()
wrapper = await DBWrapper2.create(database=db_path, reader_count=1, db_version=db_version)
coin_store = await CoinStore.create(wrapper)
store = await BlockStore.create(wrapper)
bc1 = await Blockchain.create(coin_store, store, constants, Path("."), 2)
assert bc1.get_peak() is None
return bc1, wrapper, db_path
def persistent_blocks(
num_of_blocks: int,
db_name: str,
bt: BlockTools,
seed: bytes = b"",
empty_sub_slots=0,
normalized_to_identity_cc_eos: bool = False,
normalized_to_identity_icc_eos: bool = False,
normalized_to_identity_cc_sp: bool = False,
normalized_to_identity_cc_ip: bool = False,
block_list_input: List[FullBlock] = None,
time_per_block: Optional[float] = None,
):
# try loading from disc, if not create new blocks.db file
# TODO hash fixtures.py and blocktool.py, add to path, delete if the files changed
if block_list_input is None:
block_list_input = []
block_path_dir = DEFAULT_ROOT_PATH.parent.joinpath("blocks")
file_path = block_path_dir.joinpath(db_name)
ci = os.environ.get("CI")
if ci is not None and not file_path.exists():
raise Exception(f"Running in CI and expected path not found: {file_path!r}")
block_path_dir.mkdir(parents=True, exist_ok=True)
if file_path.exists():
print(f"File found at: {file_path}")
try:
bytes_list = file_path.read_bytes()
block_bytes_list: List[bytes] = pickle.loads(bytes_list)
blocks: List[FullBlock] = []
for block_bytes in block_bytes_list:
blocks.append(FullBlock.from_bytes(block_bytes))
if len(blocks) == num_of_blocks + len(block_list_input):
print(f"\n loaded {file_path} with {len(blocks)} blocks")
return blocks
except EOFError:
print("\n error reading db file")
else:
print(f"File not found at: {file_path}")
print("Creating a new test db")
return new_test_db(
file_path,
num_of_blocks,
seed,
empty_sub_slots,
bt,
block_list_input,
time_per_block,
normalized_to_identity_cc_eos,
normalized_to_identity_icc_eos,
normalized_to_identity_cc_sp,
normalized_to_identity_cc_ip,
)
def new_test_db(
path: Path,
num_of_blocks: int,
seed: bytes,
empty_sub_slots: int,
bt: BlockTools,
block_list_input: List[FullBlock],
time_per_block: Optional[float],
normalized_to_identity_cc_eos: bool = False, # CC_EOS,
normalized_to_identity_icc_eos: bool = False, # ICC_EOS
normalized_to_identity_cc_sp: bool = False, # CC_SP,
normalized_to_identity_cc_ip: bool = False, # CC_IP
):
print(f"create {path} with {num_of_blocks} blocks with ")
blocks: List[FullBlock] = bt.get_consecutive_blocks(
num_of_blocks,
block_list_input=block_list_input,
time_per_block=time_per_block,
seed=seed,
skip_slots=empty_sub_slots,
normalized_to_identity_cc_eos=normalized_to_identity_cc_eos,
normalized_to_identity_icc_eos=normalized_to_identity_icc_eos,
normalized_to_identity_cc_sp=normalized_to_identity_cc_sp,
normalized_to_identity_cc_ip=normalized_to_identity_cc_ip,
)
block_bytes_list: List[bytes] = []
for block in blocks:
block_bytes_list.append(bytes(block))
bytes_fn = pickle.dumps(block_bytes_list)
path.write_bytes(bytes_fn)
return blocks
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/build_network_protocol_files.py | tests/util/build_network_protocol_files.py | import os
import subprocess
import sysconfig
from typing import Callable, Any
from pathlib import Path
from tests.util.network_protocol_data import * # noqa: F403
from flax.util.ints import uint32
version = "1.0"
tests_dir = Path(__file__).resolve().parent
def get_network_protocol_filename() -> Path:
return tests_dir / Path("protocol_messages_bytes-v" + version)
def encode_data(data) -> bytes:
data_bytes = bytes(data)
size = uint32(len(data_bytes))
return size.to_bytes(4, "big") + data_bytes
def visit_farmer_protocol(visitor: Callable[[Any, str], None]) -> None:
visitor(new_signage_point, "new_signage_point")
visitor(declare_proof_of_space, "declare_proof_of_space")
visitor(request_signed_values, "request_signed_values")
visitor(farming_info, "farming_info")
visitor(signed_values, "signed_values")
def visit_full_node(visitor: Callable[[Any, str], None]) -> None:
visitor(new_peak, "new_peak")
visitor(new_transaction, "new_transaction")
visitor(request_transaction, "request_transaction")
visitor(respond_transaction, "respond_transaction")
visitor(request_proof_of_weight, "request_proof_of_weight")
visitor(respond_proof_of_weight, "respond_proof_of_weight")
visitor(request_block, "request_block")
visitor(reject_block, "reject_block")
visitor(request_blocks, "request_blocks")
visitor(respond_blocks, "respond_blocks")
visitor(reject_blocks, "reject_blocks")
visitor(respond_block, "respond_block")
visitor(new_unfinished_block, "new_unfinished_block")
visitor(request_unfinished_block, "request_unfinished_block")
visitor(respond_unfinished_block, "respond_unfinished_block")
visitor(new_signage_point_or_end_of_subslot, "new_signage_point_or_end_of_subslot")
visitor(request_signage_point_or_end_of_subslot, "request_signage_point_or_end_of_subslot")
visitor(respond_signage_point, "respond_signage_point")
visitor(respond_end_of_subslot, "respond_end_of_subslot")
visitor(request_mempool_transaction, "request_mempool_transaction")
visitor(new_compact_vdf, "new_compact_vdf")
visitor(request_compact_vdf, "request_compact_vdf")
visitor(respond_compact_vdf, "respond_compact_vdf")
visitor(request_peers, "request_peers")
visitor(respond_peers, "respond_peers")
def visit_wallet_protocol(visitor: Callable[[Any, str], None]) -> None:
visitor(request_puzzle_solution, "request_puzzle_solution")
visitor(puzzle_solution_response, "puzzle_solution_response")
visitor(respond_puzzle_solution, "respond_puzzle_solution")
visitor(reject_puzzle_solution, "reject_puzzle_solution")
visitor(send_transaction, "send_transaction")
visitor(transaction_ack, "transaction_ack")
visitor(new_peak_wallet, "new_peak_wallet")
visitor(request_block_header, "request_block_header")
visitor(request_block_headers, "request_block_headers")
visitor(respond_header_block, "respond_header_block")
visitor(respond_block_headers, "respond_block_headers")
visitor(reject_header_request, "reject_header_request")
visitor(request_removals, "request_removals")
visitor(respond_removals, "respond_removals")
visitor(reject_removals_request, "reject_removals_request")
visitor(request_additions, "request_additions")
visitor(respond_additions, "respond_additions")
visitor(reject_additions, "reject_additions")
visitor(request_header_blocks, "request_header_blocks")
visitor(reject_header_blocks, "reject_header_blocks")
visitor(respond_header_blocks, "respond_header_blocks")
visitor(coin_state, "coin_state")
visitor(register_for_ph_updates, "register_for_ph_updates")
visitor(reject_block_headers, "reject_block_headers"),
visitor(respond_to_ph_updates, "respond_to_ph_updates")
visitor(register_for_coin_updates, "register_for_coin_updates")
visitor(respond_to_coin_updates, "respond_to_coin_updates")
visitor(coin_state_update, "coin_state_update")
visitor(request_children, "request_children")
visitor(respond_children, "respond_children")
visitor(request_ses_info, "request_ses_info")
visitor(respond_ses_info, "respond_ses_info")
def visit_harvester_protocol(visitor: Callable[[Any, str], None]) -> None:
visitor(pool_difficulty, "pool_difficulty")
visitor(harvester_handhsake, "harvester_handhsake")
visitor(new_signage_point_harvester, "new_signage_point_harvester")
visitor(new_proof_of_space, "new_proof_of_space")
visitor(request_signatures, "request_signatures")
visitor(respond_signatures, "respond_signatures")
visitor(plot, "plot")
visitor(request_plots, "request_plots")
visitor(respond_plots, "respond_plots")
def visit_introducer_protocol(visitor: Callable[[Any, str], None]) -> None:
visitor(request_peers_introducer, "request_peers_introducer")
visitor(respond_peers_introducer, "respond_peers_introducer")
def visit_pool_protocol(visitor: Callable[[Any, str], None]) -> None:
visitor(authentication_payload, "authentication_payload")
visitor(get_pool_info_response, "get_pool_info_response")
visitor(post_partial_payload, "post_partial_payload")
visitor(post_partial_request, "post_partial_request")
visitor(post_partial_response, "post_partial_response")
visitor(get_farmer_response, "get_farmer_response")
visitor(post_farmer_payload, "post_farmer_payload")
visitor(post_farmer_request, "post_farmer_request")
visitor(post_farmer_response, "post_farmer_response")
visitor(put_farmer_payload, "put_farmer_payload")
visitor(put_farmer_request, "put_farmer_request")
visitor(put_farmer_response, "put_farmer_response")
visitor(error_response, "error_response")
def visit_timelord_protocol(visitor: Callable[[Any, str], None]) -> None:
visitor(new_peak_timelord, "new_peak_timelord")
visitor(new_unfinished_block_timelord, "new_unfinished_block_timelord")
visitor(new_infusion_point_vdf, "new_infusion_point_vdf")
visitor(new_signage_point_vdf, "new_signage_point_vdf")
visitor(new_end_of_sub_slot_bundle, "new_end_of_sub_slot_bundle")
visitor(request_compact_proof_of_time, "request_compact_proof_of_time")
visitor(respond_compact_proof_of_time, "respond_compact_proof_of_time")
def visit_all_messages(visitor: Callable[[Any, str], None]) -> None:
visit_farmer_protocol(visitor)
visit_full_node(visitor)
visit_wallet_protocol(visitor)
visit_harvester_protocol(visitor)
visit_introducer_protocol(visitor)
visit_pool_protocol(visitor)
visit_timelord_protocol(visitor)
def get_protocol_bytes() -> bytes:
result = b""
def visitor(obj: Any, name: str) -> None:
nonlocal result
result += encode_data(obj)
visit_all_messages(visitor)
return result
def build_protocol_test() -> str:
result = """# this file is generated by build_network_protocol_files.py
from typing import Tuple
from pathlib import Path
from tests.util.network_protocol_data import * # noqa: F403
from tests.util.protocol_messages_json import * # noqa: F403
from tests.util.build_network_protocol_files import get_network_protocol_filename
def parse_blob(input_bytes: bytes) -> Tuple[bytes, bytes]:
size_bytes = input_bytes[:4]
input_bytes = input_bytes[4:]
size = int.from_bytes(size_bytes, "big")
message_bytes = input_bytes[:size]
input_bytes = input_bytes[size:]
return (message_bytes, input_bytes)
def test_protocol_bytes() -> None:
filename: Path = get_network_protocol_filename()
assert filename.exists()
with open(filename, "rb") as f:
input_bytes = f.read()
"""
def visitor(obj: Any, name: str) -> None:
nonlocal result
result += f""" message_bytes, input_bytes = parse_blob(input_bytes)
message = type({name}).from_bytes(message_bytes)
assert message == {name}
assert bytes(message) == bytes({name})
"""
visit_all_messages(visitor)
result += ' assert input_bytes == b""\n'
return result
def get_protocol_json() -> str:
result = """# this file is generated by build_network_protocol_files.py
from typing import Any, Dict
"""
counter = 0
def visitor(obj: Any, name: str) -> None:
nonlocal result
nonlocal counter
result += f"\n{name}_json: Dict[str, Any] = {obj.to_json_dict()}\n"
counter += 1
visit_all_messages(visitor)
return result
def build_json_test() -> str:
result = """# this file is generated by build_network_protocol_files.py
from tests.util.network_protocol_data import * # noqa: F403
from tests.util.protocol_messages_json import * # noqa: F403
def test_protocol_json() -> None:
"""
counter = 0
def visitor(obj: Any, name: str) -> None:
nonlocal result
nonlocal counter
result += f" assert str({name}_json) == str({name}.to_json_dict())\n"
result += f" assert type({name}).from_json_dict({name}_json) == {name}\n"
counter += 1
visit_all_messages(visitor)
return result
if __name__ == "__main__":
get_network_protocol_filename().write_bytes(get_protocol_bytes())
name_to_function = {
"test_network_protocol_files.py": build_protocol_test,
"protocol_messages_json.py": get_protocol_json,
"test_network_protocol_json.py": build_json_test,
}
scripts_path = Path(sysconfig.get_path("scripts"))
for name, function in name_to_function.items():
path = tests_dir.joinpath(name)
path.write_text(function())
# black seems to have trouble when run as a module so not using `python -m black`
subprocess.run(
[scripts_path.joinpath("black"), os.fspath(path.relative_to(tests_dir))],
check=True,
cwd=tests_dir,
)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/benchmark_cost.py | tests/util/benchmark_cost.py | import time
from secrets import token_bytes
from blspy import AugSchemeMPL, PrivateKey
from clvm_tools import binutils
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.types.blockchain_format.program import Program, INFINITE_COST
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.condition_with_args import ConditionWithArgs
from flax.util.ints import uint32
from flax.simulator.wallet_tools import WalletTool
from flax.wallet.derive_keys import master_sk_to_wallet_sk
from flax.wallet.puzzles.p2_delegated_puzzle import puzzle_for_pk
def float_to_str(f):
float_string = repr(f)
if "e" in float_string: # detect scientific notation
digits, exp_str = float_string.split("e")
digits = digits.replace(".", "").replace("-", "")
exp = int(exp_str)
zero_padding = "0" * (abs(int(exp)) - 1) # minus 1 for decimal point in the sci notation
sign = "-" if f < 0 else ""
if exp > 0:
float_string = "{}{}{}.0".format(sign, digits, zero_padding)
else:
float_string = "{}0.{}{}".format(sign, zero_padding, digits)
return float_string
def run_and_return_cost_time(flaxlisp):
start = time.time()
clvm_loop = "((c (q ((c (f (a)) (c (f (a)) (c (f (r (a))) (c (f (r (r (a))))"
" (q ()))))))) (c (q ((c (i (f (r (a))) (q (i (q 1) ((c (f (a)) (c (f (a))"
" (c (- (f (r (a))) (q 1)) (c (f (r (r (a)))) (q ()))))))"
" ((c (f (r (r (a)))) (q ()))))) (q (q ()))) (a)))) (a))))"
loop_program = Program.to(binutils.assemble(clvm_loop))
clvm_loop_solution = f"(1000 {flaxlisp})"
solution_program = Program.to(binutils.assemble(clvm_loop_solution))
cost, sexp = loop_program.run_with_cost(solution_program, INFINITE_COST)
end = time.time()
total_time = end - start
return cost, total_time
def get_cost_compared_to_addition(addition_cost, addition_time, other_time):
return (addition_cost * other_time) / addition_time
def benchmark_all_operators():
addition = "(+ (q 1000000000) (q 1000000000))"
substraction = "(- (q 1000000000) (q 1000000000))"
multiply = "(* (q 1000000000) (q 1000000000))"
greater = "(> (q 1000000000) (q 1000000000))"
equal = "(= (q 1000000000) (q 1000000000))"
if_clvm = "(i (= (q 1000000000) (q 1000000000)) (q 1000000000) (q 1000000000))"
sha256tree = "(sha256 (q 1000000000))"
pubkey_for_exp = "(pubkey_for_exp (q 1))"
point_add = "(point_add"
" (q 0x17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb)"
" (q 0x17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb))"
point_add_cost, point_add_time = run_and_return_cost_time(point_add)
addition_cost, addition_time = run_and_return_cost_time(addition)
substraction_cost, substraction_time = run_and_return_cost_time(substraction)
multiply_cost, multiply_time = run_and_return_cost_time(multiply)
greater_cost, greater_time = run_and_return_cost_time(greater)
equal_cost, equal_time = run_and_return_cost_time(equal)
if_cost, if_time = run_and_return_cost_time(if_clvm)
sha256tree_cost, sha256tree_time = run_and_return_cost_time(sha256tree)
pubkey_for_exp_cost, pubkey_for_exp_time = run_and_return_cost_time(pubkey_for_exp)
one_addition = 1
one_substraction = get_cost_compared_to_addition(addition_cost, addition_time, substraction_time) / addition_cost
one_multiply = get_cost_compared_to_addition(addition_cost, addition_time, multiply_time) / addition_cost
one_greater = get_cost_compared_to_addition(addition_cost, addition_time, greater_time) / addition_cost
one_equal = get_cost_compared_to_addition(addition_cost, addition_time, equal_time) / addition_cost
one_if = get_cost_compared_to_addition(addition_cost, addition_time, if_time) / addition_cost
one_sha256 = get_cost_compared_to_addition(addition_cost, addition_time, sha256tree_time) / addition_cost
one_pubkey_for_exp = (
get_cost_compared_to_addition(addition_cost, addition_time, pubkey_for_exp_time) / addition_cost
)
one_point_add = get_cost_compared_to_addition(addition_cost, addition_time, point_add_time) / addition_cost
print(f"cost of addition is: {one_addition}")
print(f"cost of one_substraction is: {one_substraction}")
print(f"cost of one_multiply is: {one_multiply}")
print(f"cost of one_greater is: {one_greater}")
print(f"cost of one_equal is: {one_equal}")
print(f"cost of one_if is: {one_if}")
print(f"cost of one_sha256 is: {one_sha256}")
print(f"cost of one_pubkey_for_exp is: {one_pubkey_for_exp}")
print(f"cost of one_point_add is: {one_point_add}")
if __name__ == "__main__":
"""
Naive way to calculate cost ratio between vByte and CLVM cost unit.
AggSig has assigned cost of 20vBytes, simple CLVM program is benchmarked against it.
"""
wallet_tool = WalletTool(DEFAULT_CONSTANTS)
benchmark_all_operators()
secret_key: PrivateKey = AugSchemeMPL.key_gen(bytes([2] * 32))
puzzles = []
solutions = []
private_keys = []
public_keys = []
for i in range(0, 1000):
private_key: PrivateKey = master_sk_to_wallet_sk(secret_key, uint32(i))
public_key = private_key.public_key()
solution = wallet_tool.make_solution(
{ConditionOpcode.ASSERT_MY_COIN_ID: [ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [token_bytes()])]}
)
puzzle = puzzle_for_pk(bytes(public_key))
puzzles.append(puzzle)
solutions.append(solution)
private_keys.append(private_key)
public_keys.append(public_key)
# Run Puzzle 1000 times
puzzle_start = time.time()
clvm_cost = 0
for i in range(0, 1000):
cost_run, sexp = puzzles[i].run_with_cost(solutions[i], INFINITE_COST)
clvm_cost += cost_run
puzzle_end = time.time()
puzzle_time = puzzle_end - puzzle_start
print(f"Puzzle_time is: {puzzle_time}")
print(f"Puzzle cost sum is: {clvm_cost}")
private_key = master_sk_to_wallet_sk(secret_key, uint32(0))
public_key = private_key.get_g1()
message = token_bytes()
signature = AugSchemeMPL.sign(private_key, message)
pk_message_pair = (public_key, message)
# Run AggSig 1000 times
agg_sig_start = time.time()
agg_sig_cost = 0
for i in range(0, 1000):
valid = AugSchemeMPL.verify(public_key, message, signature)
assert valid
agg_sig_cost += 20
agg_sig_end = time.time()
agg_sig_time = agg_sig_end - agg_sig_start
print(f"Aggsig Cost: {agg_sig_cost}")
print(f"Aggsig time is: {agg_sig_time}")
# clvm_should_cost = agg_sig_cost * puzzle_time / agg_sig_time
clvm_should_cost = (agg_sig_cost * puzzle_time) / agg_sig_time
print(f"Puzzle should cost: {clvm_should_cost}")
constant = clvm_should_cost / clvm_cost
format = float_to_str(constant)
print(f"Constant factor: {format}")
print(f"CLVM RATIO MULTIPLIER: {1/constant}")
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/protocol_messages_json.py | tests/util/protocol_messages_json.py | # this file is generated by build_network_protocol_files.py
from __future__ import annotations
from typing import Any, Dict
new_signage_point_json: Dict[str, Any] = {
"challenge_hash": "0x34b2a753b0dc864e7218f8facf23ca0e2b636351df5289b76f5845d9a78b7026",
"challenge_chain_sp": "0x9dc8b9d685c79acdf8780d994416dfcfb118e0adc99769ecfa94e1f40aa5bbe5",
"reward_chain_sp": "0xb2828a2c7f6a2555c80c3ca9d10792a7da6ee80f686122ecd2c748dc0569a867",
"difficulty": 2329045448547720842,
"sub_slot_iters": 8265724497259558930,
"signage_point_index": 194,
}
declare_proof_of_space_json: Dict[str, Any] = {
"challenge_hash": "0x3f44d177faa11cea40477f233a8b365cce77215a84f48f65a37b2ac35c7e3ccc",
"challenge_chain_sp": "0x931c83fd8ef121177257301e11f41642618ddac65509939e252243e41bacbf78",
"signage_point_index": 31,
"reward_chain_sp": "0x6c8dbcfae52c8df391231f3f7aae24c0b1e2be9638f6fc9e4c216b9ff43548d1",
"proof_of_space": {
"challenge": "0x1fb331df88bc142e70c110e21620374118fb220ccc3ef621378197e850882ec9",
"pool_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c",
"pool_contract_puzzle_hash": None,
"plot_public_key": "0xb6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0",
"size": 204,
"proof": "0xa67188ae0c02c49b0e821a9773033a3fbd338030c383080dbb8b1d63f07af427d8075e59d911f85ea562fd967823588f9a405a4464fdf5dc0866ee15bebd6b94cb147e28aa9cf96da930611486b779737ed721ea376b9939ba05357141223d75d21b21f310ec32d85ed3b98cf301494ea91b8501138481f3bfa1c384fd998b1fdd2855ac6f0c8554c520fb0bfa3663f238124035e14682bc11eaf7c372b6af4ed7f59a406810c71711906f8c91f94b1f",
},
"challenge_chain_sp_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"reward_chain_sp_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"farmer_puzzle_hash": "0x3843d1c2c574d376225733cf1a9c63da7051954b88b5adc1a4c198c1c7d5edfd",
"pool_target": {
"puzzle_hash": "0xd23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc",
"max_height": 421941852,
},
"pool_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
}
request_signed_values_json: Dict[str, Any] = {
"quality_string": "0x60649de258d2221ca6a178476861b13f8c394a992eaeae1f1159c32bbf703b45",
"foliage_block_data_hash": "0x9da23e943246bb99ebeb5e773e35a445bbbfdbd45dd9b9df169eeca80880a53b",
"foliage_transaction_block_hash": "0x5d76a4bcb3524d862e92317410583daf50828927885444c6d62ca8843635c46f",
}
farming_info_json: Dict[str, Any] = {
"challenge_hash": "0x345cefad6a04d3ea4fec4b31e56000de622de9fe861afa53424138dd45307fc2",
"sp_hash": "0x1105c288abb976e95804796aea5bb6f66a6b500c0f538d4e71f0d701cad9ff11",
"timestamp": 16359391077414942762,
"passed": 1390832181,
"proofs": 908923578,
"total_plots": 2259819406,
}
signed_values_json: Dict[str, Any] = {
"quality_string": "0x915de5949724e1fc92d334e589c26ddbcd67415cbbdbbfc5e6de93b3b33bb267",
"foliage_block_data_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"foliage_transaction_block_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
}
new_peak_json: Dict[str, Any] = {
"header_hash": "0x8a346e8dc02e9b44c0571caa74fd99f163d4c5d7deae9f8ddb00528721493f7a",
"height": 2653549198,
"weight": 196318552117141200341240034145143439804,
"fork_point_with_previous_peak": 928039765,
"unfinished_reward_block_hash": "0xdd421c55d4edaeeb3ad60e80d73c2005a1b275c381c7e418915200d7467711b5",
}
new_transaction_json: Dict[str, Any] = {
"transaction_id": "0xe4fe833328d4e82f9c57bc1fc2082c9b63da23e46927522cb5a073f9f0979b6a",
"cost": 13950654730705425115,
"fees": 10674036971945712700,
}
request_transaction_json: Dict[str, Any] = {
"transaction_id": "0x3dc310a07be53bfd701e4a0d77ce39836eeab4717fe25b1ae4c3f16aad0e5d83"
}
respond_transaction_json: Dict[str, Any] = {
"transaction": {
"coin_spends": [
{
"coin": {
"parent_coin_info": "0xd56f435d3382cb9aa5f50f51816e4c54487c66402339901450f3c810f1d77098",
"puzzle_hash": "0x9944f63fcc251719b2f04c47ab976a167f96510736dc6fdfa8e037d740f4b5f3",
"amount": 6602327684212801382,
},
"puzzle_reveal": "0xff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080",
"solution": "0xffff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080ff8080",
}
],
"aggregated_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
}
}
request_proof_of_weight_json: Dict[str, Any] = {
"total_number_of_blocks": 1109907246,
"tip": "0x1fa3bfc747762c6edbe9937630e50b6982c3cf4fd67931f2ffcececb8c509839",
}
respond_proof_of_weight_json: Dict[str, Any] = {
"wp": {
"sub_epochs": [
{
"reward_chain_hash": "0x6fdcfaabeb149f9c44c80c230c44771e14b3d4e1b361dcca9c823b7ea7887ffe",
"num_blocks_overflow": 190,
"new_sub_slot_iters": 10527522631566046685,
"new_difficulty": 989988965238543242,
}
],
"sub_epoch_segments": [
{
"sub_epoch_n": 3946877794,
"sub_slots": [
{
"proof_of_space": {
"challenge": "0x1fb331df88bc142e70c110e21620374118fb220ccc3ef621378197e850882ec9",
"pool_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c",
"pool_contract_puzzle_hash": None,
"plot_public_key": "0xb6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0",
"size": 204,
"proof": "0xa67188ae0c02c49b0e821a9773033a3fbd338030c383080dbb8b1d63f07af427d8075e59d911f85ea562fd967823588f9a405a4464fdf5dc0866ee15bebd6b94cb147e28aa9cf96da930611486b779737ed721ea376b9939ba05357141223d75d21b21f310ec32d85ed3b98cf301494ea91b8501138481f3bfa1c384fd998b1fdd2855ac6f0c8554c520fb0bfa3663f238124035e14682bc11eaf7c372b6af4ed7f59a406810c71711906f8c91f94b1f",
},
"cc_signage_point": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"cc_infusion_point": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"icc_infusion_point": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"cc_sp_vdf_info": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"signage_point_index": 255,
"cc_slot_end": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"icc_slot_end": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"cc_slot_end_info": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"icc_slot_end_info": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"cc_ip_vdf_info": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"icc_ip_vdf_info": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"total_iters": 178067533887691737655963933428342640848,
}
],
"rc_slot_end_info": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
}
],
"recent_chain_data": [
{
"finished_sub_slots": [
{
"challenge_chain": {
"challenge_chain_end_of_slot_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"infused_challenge_chain_sub_slot_hash": "0x42c10d66108589c11bb3811b37d214b6351b73e25bad6c956c0bf1c05a4d93fb",
"subepoch_summary_hash": "0xcdb6d334b461a01c4d07c76dd71d5a9f3a2949807a3499eb484e4b91e6cea309",
"new_sub_slot_iters": 42556034269004566,
"new_difficulty": 16610212302933121129,
},
"infused_challenge_chain": {
"infused_challenge_chain_end_of_slot_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
}
},
"reward_chain": {
"end_of_slot_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"challenge_chain_sub_slot_hash": "0x893f282b27c4961f47d886577a8d7c136d1e738e6c5badd37c1994e68871cb70",
"infused_challenge_chain_sub_slot_hash": "0x4be4cc2a1f15c5c69fb9becac0cbe0df5ea007a94f22bca79f88e14fc2a46def",
"deficit": 52,
},
"proofs": {
"challenge_chain_slot_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"infused_challenge_chain_slot_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"reward_chain_slot_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
},
}
],
"reward_chain_block": {
"weight": 187084448821891925757676377381787790114,
"height": 301889038,
"total_iters": 147405131564197136044258885592706844266,
"signage_point_index": 9,
"pos_ss_cc_challenge_hash": "0x50102505a28e3969db19c699a5e53af73c1cb3108e2ab9ce9d86d1f058b10457",
"proof_of_space": {
"challenge": "0x1fb331df88bc142e70c110e21620374118fb220ccc3ef621378197e850882ec9",
"pool_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c",
"pool_contract_puzzle_hash": None,
"plot_public_key": "0xb6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0",
"size": 204,
"proof": "0xa67188ae0c02c49b0e821a9773033a3fbd338030c383080dbb8b1d63f07af427d8075e59d911f85ea562fd967823588f9a405a4464fdf5dc0866ee15bebd6b94cb147e28aa9cf96da930611486b779737ed721ea376b9939ba05357141223d75d21b21f310ec32d85ed3b98cf301494ea91b8501138481f3bfa1c384fd998b1fdd2855ac6f0c8554c520fb0bfa3663f238124035e14682bc11eaf7c372b6af4ed7f59a406810c71711906f8c91f94b1f",
},
"challenge_chain_sp_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"challenge_chain_sp_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"challenge_chain_ip_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"reward_chain_sp_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"reward_chain_sp_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"reward_chain_ip_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"infused_challenge_chain_ip_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"is_transaction_block": False,
},
"challenge_chain_sp_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"challenge_chain_ip_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"reward_chain_sp_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"reward_chain_ip_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"infused_challenge_chain_ip_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"foliage": {
"prev_block_hash": "0x312fd3fe7c9a21cd90ce40b567730ab087fa29436bf8568adacc605f52912fba",
"reward_block_hash": "0xba37d30b755680e0b8873a1b7f0ae7636400999ca2b2d32ad0aebb0c24e258aa",
"foliage_block_data": {
"unfinished_reward_block_hash": "0x205be4e4efff5b8d99b3f5c8d0ad19072875b9bac1ec3edda1f0df5467e2e61a",
"pool_target": {
"puzzle_hash": "0xd23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc",
"max_height": 421941852,
},
"pool_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"farmer_reward_puzzle_hash": "0x4e62d7ed145b394ce28533e4f0a7d70f339f9d4c49ee717e51e2d6480e5fcbcc",
"extension_data": "0xd53254dcdcbfddb431c3ff89d1a785491663b51552e3847d29e36972f43b536d",
},
"foliage_block_data_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"foliage_transaction_block_hash": "0xac6a47ca76efeac93b1c435dfa2e876ab63c0a62fa7aa5a6b8cf9efd95084025",
"foliage_transaction_block_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
},
"foliage_transaction_block": {
"prev_transaction_block_hash": "0x852ed117f46fa98af7a17fcb050c369245a30fcffc190177c3a316109d1609c7",
"timestamp": 3871668531533889186,
"filter_hash": "0xffab724c5df9b90c0842565225f5ed842da14f159373c05d63643405ccce84b3",
"additions_root": "0x5f87a17fafb44afd0d6b5b67b77be38570b4bc0150388bd9c176d4ac5d4e693b",
"removals_root": "0xdb967ce278f9bf4fdc77cb9fa82b5b2ce6876746eb5e61f4352a41e3abb63275",
"transactions_info_hash": "0x7eebe3b21505f7c7cb5536e96ab893bfa4626a5cf9c79fadb5dae6913e0a7cb3",
},
"transactions_filter": "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"transactions_info": {
"generator_root": "0x4cb791379aee03879628f69f16c0d3b78fd865c010c53c3b412dfa56e40f4d78",
"generator_refs_root": "0x180c72ecd6e32986a354681fcf6924aa82c08cfb9df95667fa24442103cc2189",
"aggregated_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"fees": 5840504611725889474,
"cost": 7273736876528078474,
"reward_claims_incorporated": [
{
"parent_coin_info": "0xdde12b149d44bafd07390d2ad6ce774ab50d083ada3f0bc3c0adebe6a6a1a4ab",
"puzzle_hash": "0x503da231145145b114e85af933ed86a5834c08323743803ee31fca2b1c64ce15",
"amount": 8428133224333694484,
}
],
},
}
],
},
"tip": "0xbf71d6f1ecae308aacf87db77aeba5a06f5d1099bfc7005529885e1f2dad857f",
}
request_block_json: Dict[str, Any] = {"height": 678860074, "include_transaction_block": False}
reject_block_json: Dict[str, Any] = {"height": 966946253}
request_blocks_json: Dict[str, Any] = {
"start_height": 2578479570,
"end_height": 3884442719,
"include_transaction_block": False,
}
respond_blocks_json: Dict[str, Any] = {
"start_height": 1000,
"end_height": 4201431299,
"blocks": [
{
"finished_sub_slots": [
{
"challenge_chain": {
"challenge_chain_end_of_slot_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"infused_challenge_chain_sub_slot_hash": "0x42c10d66108589c11bb3811b37d214b6351b73e25bad6c956c0bf1c05a4d93fb",
"subepoch_summary_hash": "0xcdb6d334b461a01c4d07c76dd71d5a9f3a2949807a3499eb484e4b91e6cea309",
"new_sub_slot_iters": 42556034269004566,
"new_difficulty": 16610212302933121129,
},
"infused_challenge_chain": {
"infused_challenge_chain_end_of_slot_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
}
},
"reward_chain": {
"end_of_slot_vdf": {
"challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972",
"number_of_iterations": 14708638287767651172,
"output": {
"data": "0x08000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
},
},
"challenge_chain_sub_slot_hash": "0x893f282b27c4961f47d886577a8d7c136d1e738e6c5badd37c1994e68871cb70",
"infused_challenge_chain_sub_slot_hash": "0x4be4cc2a1f15c5c69fb9becac0cbe0df5ea007a94f22bca79f88e14fc2a46def",
"deficit": 52,
},
"proofs": {
"challenge_chain_slot_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"infused_challenge_chain_slot_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
"reward_chain_slot_proof": {
"witness_type": 197,
"witness": "0x30303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030",
"normalized_to_identity": False,
},
},
}
],
"reward_chain_block": {
"weight": 187084448821891925757676377381787790114,
"height": 301889038,
"total_iters": 147405131564197136044258885592706844266,
"signage_point_index": 9,
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_paginator.py | tests/util/test_paginator.py | from __future__ import annotations
from math import ceil
from typing import List, Type
import pytest
from flax.util.paginator import InvalidPageSizeError, InvalidPageSizeLimit, PageOutOfBoundsError, Paginator
@pytest.mark.parametrize(
    "source, page_size, page_size_limit",
    [([], 1, 1), ([1], 1, 2), ([1, 2], 2, 2), ([], 10, 100), ([1, 2, 10], 1000, 1000)],
)
def test_constructor_valid_inputs(source: List[int], page_size: int, page_size_limit: int) -> None:
    # Every source here fits on a single page, so a freshly created paginator
    # must report exactly one page holding the source verbatim.
    pager: Paginator = Paginator.create(source, page_size, page_size_limit)
    assert pager.page_size() == page_size
    assert pager.page_count() == 1
    assert pager.get_page(0) == source
@pytest.mark.parametrize(
    "page_size, page_size_limit, exception",
    [
        (5, -1, InvalidPageSizeLimit),
        (5, 0, InvalidPageSizeLimit),
        (2, 1, InvalidPageSizeError),
        (100, 1, InvalidPageSizeError),
        (1001, 1000, InvalidPageSizeError),
    ],
)
def test_constructor_invalid_inputs(page_size: int, page_size_limit: int, exception: Type[Exception]) -> None:
    # Non-positive limits and page sizes exceeding the limit must be rejected.
    empty_source: List[int] = []
    with pytest.raises(exception):
        Paginator.create(empty_source, page_size, page_size_limit)
def test_page_count() -> None:
    # page_count() is ceil(len(source) / page_size), but never less than one
    # page — even for an empty source.
    for size in range(1, 10):
        for length in range(0, 10):
            expected_pages = max(1, ceil(length / size))
            assert Paginator.create(range(0, length), size).page_count() == expected_pages
@pytest.mark.parametrize(
    "length, page_size, page, expected_data",
    [
        (17, 5, 0, [0, 1, 2, 3, 4]),
        (17, 5, 1, [5, 6, 7, 8, 9]),
        (17, 5, 2, [10, 11, 12, 13, 14]),
        (17, 5, 3, [15, 16]),
        (3, 4, 0, [0, 1, 2]),
        (3, 3, 0, [0, 1, 2]),
        (3, 2, 0, [0, 1]),
        (3, 2, 1, [2]),
        (3, 1, 0, [0]),
        (3, 1, 1, [1]),
        (3, 1, 2, [2]),
        (2, 2, 0, [0, 1]),
        (2, 1, 0, [0]),
        (2, 1, 1, [1]),
        (1, 2, 0, [0]),
        (0, 2, 0, []),
        (0, 10, 0, []),
    ],
)
def test_get_page_valid(length: int, page_size: int, page: int, expected_data: List[int]) -> None:
    """get_page() returns the expected slice for every in-bounds page.

    The signature now lists the parameters in the same order as the
    parametrize argnames; pytest matches them by name either way, so this
    is a readability fix only.
    """
    assert Paginator.create(list(range(0, length)), page_size).get_page(page) == expected_data
@pytest.mark.parametrize("page", [-1000, -10, -1, 5, 10, 1000])
def test_get_page_invalid(page: int) -> None:
    # A 17-element source at page size 5 has valid pages 0..3; any index
    # outside that range must raise.
    pager = Paginator.create(range(0, 17), 5)
    with pytest.raises(PageOutOfBoundsError):
        pager.get_page(page)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/rpc.py | tests/util/rpc.py | from __future__ import annotations
from flax.rpc.rpc_client import RpcClient
from flax.rpc.rpc_server import RpcApiProtocol
async def validate_get_routes(client: RpcClient, api: RpcApiProtocol) -> None:
    # The server must report exactly the API's own routes plus the built-in
    # RpcServer routes, and both contributions must be non-empty.
    response = await client.fetch("get_routes", {})
    reported_routes = response["routes"]
    assert len(reported_routes) > 0
    api_routes = list(api.get_routes().keys())
    # TODO: avoid duplication of RpcServer.get_routes()
    builtin_routes = [
        "/get_connections",
        "/open_connection",
        "/close_connection",
        "/stop_node",
        "/get_routes",
        "/healthz",
    ]
    assert len(api_routes) > 0
    assert sorted(reported_routes) == sorted(api_routes + builtin_routes)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/temp_file.py | tests/util/temp_file.py | from __future__ import annotations
import contextlib
import tempfile
from pathlib import Path
from typing import Iterator
@contextlib.contextmanager
def TempFile() -> Iterator[Path]:
    """Yield a unique temporary file path and remove the file on exit.

    The NamedTemporaryFile object is discarded immediately; only its name is
    kept, so the yielded path does not necessarily exist yet — the caller
    decides whether to create it.  Cleanup runs in a ``finally`` block so the
    file is removed even when the ``with`` body raises (the original version
    leaked the file on exceptions).
    """
    path = Path(tempfile.NamedTemporaryFile().name)
    try:
        yield path
    finally:
        if path.exists():
            path.unlink()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/gen_ssl_certs.py | tests/util/gen_ssl_certs.py | from __future__ import annotations
from pathlib import Path
from typing import Optional
import click
from pytest import MonkeyPatch
from flax.ssl.create_ssl import generate_ca_signed_cert, get_flax_ca_crt_key, make_ca_cert
# NOTE: This is a standalone tool that can be used to generate a CA cert/key as well as node certs/keys.
@click.command()
@click.option(
    "--suffix",
    type=str,
    default="",
    help="Suffix to append to the generated cert/key symbols.",
)
def gen_ssl(suffix: str = "") -> None:
    """Generate a test CA plus per-node SSL certs/keys, printed as Python source.

    Instead of writing files, ``write_ssl_cert_and_key`` is monkey-patched so
    every cert/key pair is printed as a ``bytes`` literal named after the path
    placeholder it was "written" to.  The output is meant to be pasted into a
    test-fixture module.  ``suffix`` (when non-empty) is appended to the names
    of the two aggregate symbols printed at the end.

    Note: ``--suffix`` previously declared ``required=True`` alongside
    ``default=""``; in click the default always satisfies the required check,
    so the flag was inert and has been dropped.
    """
    captured_crt: Optional[bytes] = None
    captured_key: Optional[bytes] = None
    capture_cert_and_key = False

    def patched_write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: Path, key_data: bytes) -> None:
        # Print the PEM data as Python bytes literals instead of writing files.
        # While capture_cert_and_key is set, also stash the data so the private
        # CA material can later sign the per-node "private" certs.
        nonlocal capture_cert_and_key, captured_crt, captured_key
        if capture_cert_and_key:
            captured_crt = cert_data
            captured_key = key_data
        print(f"{cert_path} = b\"\"\"{cert_data.decode(encoding='utf8')}\"\"\"")
        print()
        print(f"{key_path} = b\"\"\"{key_data.decode(encoding='utf8')}\"\"\"")
        print()

    patch = MonkeyPatch()
    patch.setattr("flax.ssl.create_ssl.write_ssl_cert_and_key", patched_write_ssl_cert_and_key)
    private_ca_crt: Optional[bytes] = None
    private_ca_key: Optional[bytes] = None
    # Generate the private test CA first, capturing its cert/key for signing below.
    capture_cert_and_key = True
    print("from typing import Dict, Tuple")
    print()
    make_ca_cert(Path("SSL_TEST_PRIVATE_CA_CRT"), Path("SSL_TEST_PRIVATE_CA_KEY"))
    capture_cert_and_key = False
    private_ca_crt = captured_crt
    private_ca_key = captured_key
    # Symbol names per node type, split by trust domain: "public" certs are
    # signed by the flax CA, "private" certs by the test CA generated above.
    node_certs_and_keys = {
        "full_node": {
            "private": {"crt": "SSL_TEST_FULLNODE_PRIVATE_CRT", "key": "SSL_TEST_FULLNODE_PRIVATE_KEY"},
            "public": {"crt": "SSL_TEST_FULLNODE_PUBLIC_CRT", "key": "SSL_TEST_FULLNODE_PUBLIC_KEY"},
        },
        "wallet": {
            "private": {"crt": "SSL_TEST_WALLET_PRIVATE_CRT", "key": "SSL_TEST_WALLET_PRIVATE_KEY"},
            "public": {"crt": "SSL_TEST_WALLET_PUBLIC_CRT", "key": "SSL_TEST_WALLET_PUBLIC_KEY"},
        },
        "farmer": {
            "private": {"crt": "SSL_TEST_FARMER_PRIVATE_CRT", "key": "SSL_TEST_FARMER_PRIVATE_KEY"},
            "public": {"crt": "SSL_TEST_FARMER_PUBLIC_CRT", "key": "SSL_TEST_FARMER_PUBLIC_KEY"},
        },
        "harvester": {"private": {"crt": "SSL_TEST_HARVESTER_PRIVATE_CRT", "key": "SSL_TEST_HARVESTER_PRIVATE_KEY"}},
        "timelord": {
            "private": {"crt": "SSL_TEST_TIMELORD_PRIVATE_CRT", "key": "SSL_TEST_TIMELORD_PRIVATE_KEY"},
            "public": {"crt": "SSL_TEST_TIMELORD_PUBLIC_CRT", "key": "SSL_TEST_TIMELORD_PUBLIC_KEY"},
        },
        "crawler": {"private": {"crt": "SSL_TEST_CRAWLER_PRIVATE_CRT", "key": "SSL_TEST_CRAWLER_PRIVATE_KEY"}},
        "daemon": {"private": {"crt": "SSL_TEST_DAEMON_PRIVATE_CRT", "key": "SSL_TEST_DAEMON_PRIVATE_KEY"}},
        "introducer": {
            "public": {"crt": "SSL_TEST_INTRODUCER_PUBLIC_CRT", "key": "SSL_TEST_INTRODUCER_PUBLIC_KEY"},
        },
    }
    flax_ca_crt, flax_ca_key = get_flax_ca_crt_key()
    for node_name, cert_type_dict in node_certs_and_keys.items():
        for cert_type, cert_dict in cert_type_dict.items():
            crt = cert_dict["crt"]
            key = cert_dict["key"]
            ca_crt = flax_ca_crt if cert_type == "public" else private_ca_crt
            ca_key = flax_ca_key if cert_type == "public" else private_ca_key
            generate_ca_signed_cert(ca_crt, ca_key, Path(crt), Path(key))
    patch.undo()
    # Emit the aggregate lookup tables referencing the symbols printed above.
    append_str = "" if suffix == "" else f"_{suffix}"
    print(
        f"SSL_TEST_PRIVATE_CA_CERT_AND_KEY{append_str}: Tuple[bytes, bytes] = "
        "(SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY)"
    )
    print()
    print(f"SSL_TEST_NODE_CERTS_AND_KEYS{append_str}: Dict[str, Dict[str, Dict[str, bytes]]] = {{")
    for node_name, cert_type_dict in node_certs_and_keys.items():
        print(f'    "{node_name}": {{')
        for cert_type, cert_dict in cert_type_dict.items():
            crt = cert_dict["crt"]
            key = cert_dict["key"]
            print(f'        "{cert_type}": {{"crt": {crt}, "key": {key}}},')
        print("    },")
    print("}")
def main() -> None:
gen_ssl()
if __name__ == "__main__":
main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/generator_tools_testing.py | tests/util/generator_tools_testing.py | from __future__ import annotations
from typing import List, Tuple
from flax.full_node.mempool_check_conditions import get_name_puzzle_conditions
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.full_block import FullBlock
from flax.types.generator_types import BlockGenerator
from flax.util.generator_tools import tx_removals_and_additions
def run_and_get_removals_and_additions(
block: FullBlock, max_cost: int, *, cost_per_byte: int, mempool_mode=False
) -> Tuple[List[bytes32], List[Coin]]:
removals: List[bytes32] = []
additions: List[Coin] = []
assert len(block.transactions_generator_ref_list) == 0
if not block.is_transaction_block():
return [], []
if block.transactions_generator is not None:
npc_result = get_name_puzzle_conditions(
BlockGenerator(block.transactions_generator, [], []),
max_cost,
cost_per_byte=cost_per_byte,
mempool_mode=mempool_mode,
)
assert npc_result.error is None
rem, add = tx_removals_and_additions(npc_result.conds)
# build removals list
removals.extend(rem)
additions.extend(add)
rewards = block.get_included_reward_coins()
additions.extend(rewards)
return removals, additions
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_struct_stream.py | tests/util/test_struct_stream.py | from __future__ import annotations
from dataclasses import dataclass
from decimal import Decimal
import struct
import io
from typing import Iterable, List, Optional, Type
import pytest
# TODO: update after resolution in https://github.com/pytest-dev/pytest/issues/7469
from _pytest.mark.structures import ParameterSet
# TODO: update after resolution in https://github.com/pytest-dev/pytest/issues/7469
from _pytest.fixtures import SubRequest
from typing_extensions import final
from flax.util.ints import int8, uint8, int16, uint16, int32, uint32, int64, uint64, uint128, int512
from flax.util.struct_stream import StructStream, parse_metadata_from_name
def dataclass_parameter(instance: object) -> ParameterSet:
return pytest.param(instance, id=repr(instance)[len(type(instance).__name__) + 1 : -1])
def dataclass_parameters(instances: Iterable[object]) -> List[ParameterSet]:
return [dataclass_parameter(instance) for instance in instances]
@dataclass(frozen=True)
class BadName:
name: str
error: str
@final
@dataclass(frozen=True)
class Good:
name: str
cls: Type[StructStream]
size: int
bits: int
signed: bool
maximum_exclusive: int
minimum: int
@classmethod
def create(
cls,
name: str,
size: int,
signed: bool,
maximum_exclusive: int,
minimum: int,
) -> Good:
raw_class: Type[StructStream] = type(name, (StructStream,), {})
parsed_cls = parse_metadata_from_name(raw_class)
return cls(
name=name,
cls=parsed_cls,
size=size,
bits=size * 8,
signed=signed,
maximum_exclusive=maximum_exclusive,
minimum=minimum,
)
good_classes = [
Good.create(name="uint8", size=1, signed=False, maximum_exclusive=0xFF + 1, minimum=0),
Good.create(name="int8", size=1, signed=True, maximum_exclusive=0x80, minimum=-0x80),
Good.create(name="uint16", size=2, signed=False, maximum_exclusive=0xFFFF + 1, minimum=0),
Good.create(name="int16", size=2, signed=True, maximum_exclusive=0x8000, minimum=-0x8000),
Good.create(name="uint24", size=3, signed=False, maximum_exclusive=0xFFFFFF + 1, minimum=0),
Good.create(name="int24", size=3, signed=True, maximum_exclusive=0x800000, minimum=-0x800000),
Good.create(
name="uint128",
size=16,
signed=False,
maximum_exclusive=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF + 1,
minimum=0,
),
Good.create(
name="int128",
size=16,
signed=True,
maximum_exclusive=0x80000000000000000000000000000000,
minimum=-0x80000000000000000000000000000000,
),
]
@pytest.fixture(
name="good",
params=dataclass_parameters(good_classes),
)
def good_fixture(request: SubRequest) -> Good:
return request.param # type: ignore[no-any-return]
class TestStructStream:
def _test_impl(
self,
cls: Type[StructStream],
upper_boundary: int,
lower_boundary: int,
length: int,
struct_format: Optional[str],
) -> None:
with pytest.raises(ValueError):
t = cls(upper_boundary + 1)
with pytest.raises(ValueError):
t = cls(lower_boundary - 1)
t = cls(upper_boundary)
assert t == upper_boundary
t = cls(lower_boundary)
assert t == lower_boundary
t = cls(0)
assert t == 0
with pytest.raises(ValueError):
cls.parse(io.BytesIO(b"\0" * (length - 1)))
with pytest.raises(ValueError):
cls.from_bytes(b"\0" * (length - 1))
with pytest.raises(ValueError):
cls.from_bytes(b"\0" * (length + 1))
if struct_format is not None:
bytes_io = io.BytesIO()
cls(lower_boundary).stream(bytes_io)
assert bytes_io.getvalue() == struct.pack(struct_format, lower_boundary)
bytes_io = io.BytesIO()
cls(upper_boundary).stream(bytes_io)
assert bytes_io.getvalue() == struct.pack(struct_format, upper_boundary)
with pytest.raises(struct.error):
struct.pack(struct_format, lower_boundary - 1)
with pytest.raises(struct.error):
struct.pack(struct_format, upper_boundary + 1)
def test_int512(self) -> None:
# int512 is special. it uses 65 bytes to allow positive and negative
# "uint512"
self._test_impl(
int512,
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, # noqa: E501
-0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, # noqa: E501
length=65,
struct_format=None,
)
def test_uint128(self) -> None:
self._test_impl(uint128, 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, 0, length=16, struct_format=None)
def test_uint64(self) -> None:
self._test_impl(uint64, 0xFFFFFFFFFFFFFFFF, 0, length=8, struct_format="!Q")
def test_int64(self) -> None:
self._test_impl(int64, 0x7FFFFFFFFFFFFFFF, -0x8000000000000000, length=8, struct_format="!q")
def test_uint32(self) -> None:
self._test_impl(uint32, 0xFFFFFFFF, 0, length=4, struct_format="!L")
def test_int32(self) -> None:
self._test_impl(int32, 0x7FFFFFFF, -0x80000000, length=4, struct_format="!l")
def test_uint16(self) -> None:
self._test_impl(uint16, 0xFFFF, 0, length=2, struct_format="!H")
def test_int16(self) -> None:
self._test_impl(int16, 0x7FFF, -0x8000, length=2, struct_format="!h")
def test_uint8(self) -> None:
self._test_impl(uint8, 0xFF, 0, length=1, struct_format="!B")
def test_int8(self) -> None:
self._test_impl(int8, 0x7F, -0x80, length=1, struct_format="!b")
def test_roundtrip(self) -> None:
def roundtrip(v: StructStream) -> None:
s = io.BytesIO()
v.stream(s)
s.seek(0)
cls = type(v)
v2 = cls.parse(s)
assert v2 == v
# int512 is special. it uses 65 bytes to allow positive and negative
# "uint512"
roundtrip(
int512(
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF # noqa: E501
)
)
roundtrip(
int512(
-0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF # noqa: E501
)
)
roundtrip(uint128(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF))
roundtrip(uint128(0))
roundtrip(uint64(0xFFFFFFFFFFFFFFFF))
roundtrip(uint64(0))
roundtrip(int64(0x7FFFFFFFFFFFFFFF))
roundtrip(int64(-0x8000000000000000))
roundtrip(uint32(0xFFFFFFFF))
roundtrip(uint32(0))
roundtrip(int32(0x7FFFFFFF))
roundtrip(int32(-0x80000000))
roundtrip(uint16(0xFFFF))
roundtrip(uint16(0))
roundtrip(int16(0x7FFF))
roundtrip(int16(-0x8000))
roundtrip(uint8(0xFF))
roundtrip(uint8(0))
roundtrip(int8(0x7F))
roundtrip(int8(-0x80))
def test_uint32_from_decimal(self) -> None:
assert uint32(Decimal("137")) == 137
def test_uint32_from_float(self) -> None:
assert uint32(4.0) == 4
def test_uint32_from_str(self) -> None:
assert uint32("43") == 43
def test_uint32_from_bytes(self) -> None:
assert uint32(b"273") == 273
def test_struct_stream_cannot_be_instantiated_directly(self) -> None:
with pytest.raises(ValueError, match="does not fit"):
StructStream(0)
@pytest.mark.parametrize(
argnames="bad_name",
argvalues=dataclass_parameters(
instances=[
BadName(name="uint", error="expected integer suffix but got: ''"),
BadName(name="blue", error="expected integer suffix but got"),
BadName(name="blue8", error="expected integer suffix but got: ''"),
BadName(name="sint8", error="expected class name"),
BadName(name="redint8", error="expected class name"),
BadName(name="int7", error="must be a multiple of 8"),
BadName(name="int9", error="must be a multiple of 8"),
BadName(name="int31", error="must be a multiple of 8"),
BadName(name="int0", error="bit size must greater than zero"),
# below could not happen in a hard coded class name, but testing for good measure
BadName(name="int-1", error="bit size must greater than zero"),
],
),
)
def test_parse_metadata_from_name_raises(self, bad_name: BadName) -> None:
cls = type(bad_name.name, (StructStream,), {})
with pytest.raises(ValueError, match=bad_name.error):
parse_metadata_from_name(cls)
def test_parse_metadata_from_name_correct_size(self, good: Good) -> None:
assert good.cls.SIZE == good.size
def test_parse_metadata_from_name_correct_bits(self, good: Good) -> None:
assert good.cls.BITS == good.bits
def test_parse_metadata_from_name_correct_signedness(self, good: Good) -> None:
assert good.cls.SIGNED == good.signed
def test_parse_metadata_from_name_correct_maximum(self, good: Good) -> None:
assert good.cls.MAXIMUM_EXCLUSIVE == good.maximum_exclusive
def test_parse_metadata_from_name_correct_minimum(self, good: Good) -> None:
assert good.cls.MINIMUM == good.minimum
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/alert_server.py | tests/util/alert_server.py | from __future__ import annotations
import argparse
import asyncio
import logging
from pathlib import Path
from typing import Any
from aiohttp import web
log = logging.getLogger(__name__)
class AlertServer:
shut_down: bool
shut_down_event: asyncio.Event
log: Any
app: Any
alert_file_path: Path
port: int
@staticmethod
async def create_alert_server(alert_file_path: Path, port):
self = AlertServer()
self.log = log
self.shut_down = False
self.app = web.Application()
self.shut_down_event = asyncio.Event()
self.port = port
routes = [
web.get("/status", self.status),
]
self.alert_file_path = alert_file_path
self.app.add_routes(routes)
return self
async def status(self, request):
file_text = self.alert_file_path.read_text()
return web.Response(body=file_text, content_type="text/plain")
async def stop(self):
self.shut_down_event.set()
async def run(self):
runner = web.AppRunner(self.app, access_log=None)
await runner.setup()
site = web.TCPSite(runner, None, self.port)
await site.start()
async def run_and_wait(file_path, port):
server = await AlertServer.create_alert_server(Path(file_path), port)
await server.run()
await server.shut_down_event.wait()
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-file_path", type=str, dest="file_path")
parser.add_argument("-port", type=str, dest="port")
port = None
file_path = None
for key, value in vars(parser.parse_args()).items():
if key == "port":
port = value
elif key == "file_path":
file_path = value
else:
print(f"Invalid argument {key}")
if port is None or file_path is None:
print(
"Missing arguments, example usage:\n\n"
"python flax/util/alert_server.py -p 4000 -file_path /home/user/alert.txt\n"
)
quit()
return asyncio.run(run_and_wait(file_path, port))
if __name__ == "__main__":
main()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/network_protocol_data.py | tests/util/network_protocol_data.py | # flake8: noqa
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint8, uint16, uint32, uint64, uint128
from flax.types.blockchain_format.proof_of_space import ProofOfSpace
from flax.types.blockchain_format.pool_target import PoolTarget
from flax.types.coin_spend import CoinSpend
from flax.types.blockchain_format.coin import Coin
from flax.types.spend_bundle import SpendBundle
from flax.types.blockchain_format.program import Program, SerializedProgram
from flax.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from flax.types.weight_proof import WeightProof, SubEpochData, SubEpochChallengeSegment, SubSlotData, RecentChainData
from flax.types.blockchain_format.vdf import VDFInfo, VDFProof
from flax.types.blockchain_format.classgroup import ClassgroupElement
from blspy import G1Element, G2Element
from flax.types.header_block import HeaderBlock
from flax.types.full_block import FullBlock
from flax.types.unfinished_block import UnfinishedBlock
from flax.types.blockchain_format.slots import (
ChallengeChainSubSlot,
InfusedChallengeChainSubSlot,
RewardChainSubSlot,
SubSlotProofs,
)
from flax.types.end_of_slot_bundle import EndOfSubSlotBundle
from flax.types.peer_info import TimestampedPeerInfo
from flax.types.blockchain_format.reward_chain_block import RewardChainBlock
from flax.types.blockchain_format.foliage import Foliage, FoliageTransactionBlock, FoliageBlockData, TransactionsInfo
from flax.protocols import (
farmer_protocol,
full_node_protocol,
harvester_protocol,
introducer_protocol,
pool_protocol,
timelord_protocol,
wallet_protocol,
)
### FARMER PROTOCOL
new_signage_point = farmer_protocol.NewSignagePoint(
bytes32(bytes.fromhex("34b2a753b0dc864e7218f8facf23ca0e2b636351df5289b76f5845d9a78b7026")),
bytes32(bytes.fromhex("9dc8b9d685c79acdf8780d994416dfcfb118e0adc99769ecfa94e1f40aa5bbe5")),
bytes32(bytes.fromhex("b2828a2c7f6a2555c80c3ca9d10792a7da6ee80f686122ecd2c748dc0569a867")),
uint64(2329045448547720842),
uint64(8265724497259558930),
uint8(194),
)
proof_of_space = ProofOfSpace(
bytes32(bytes.fromhex("1fb331df88bc142e70c110e21620374118fb220ccc3ef621378197e850882ec9")),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
None,
G1Element(
bytes.fromhex(
"b6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0"
),
),
uint8(204),
bytes.fromhex(
"a67188ae0c02c49b0e821a9773033a3fbd338030c383080dbb8b1d63f07af427d8075e59d911f85ea562fd967823588f9a405a4464fdf5dc0866ee15bebd6b94cb147e28aa9cf96da930611486b779737ed721ea376b9939ba05357141223d75d21b21f310ec32d85ed3b98cf301494ea91b8501138481f3bfa1c384fd998b1fdd2855ac6f0c8554c520fb0bfa3663f238124035e14682bc11eaf7c372b6af4ed7f59a406810c71711906f8c91f94b1f",
),
)
pool_target = PoolTarget(
bytes32.from_hexstr("d23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc"),
uint32(421941852),
)
g2_element = G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
)
declare_proof_of_space = farmer_protocol.DeclareProofOfSpace(
bytes32(bytes.fromhex("3f44d177faa11cea40477f233a8b365cce77215a84f48f65a37b2ac35c7e3ccc")),
bytes32(bytes.fromhex("931c83fd8ef121177257301e11f41642618ddac65509939e252243e41bacbf78")),
uint8(31),
bytes32(bytes.fromhex("6c8dbcfae52c8df391231f3f7aae24c0b1e2be9638f6fc9e4c216b9ff43548d1")),
proof_of_space,
G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
),
G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
),
bytes32(bytes.fromhex("3843d1c2c574d376225733cf1a9c63da7051954b88b5adc1a4c198c1c7d5edfd")),
pool_target,
G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
),
)
request_signed_values = farmer_protocol.RequestSignedValues(
bytes32(bytes.fromhex("60649de258d2221ca6a178476861b13f8c394a992eaeae1f1159c32bbf703b45")),
bytes32(bytes.fromhex("9da23e943246bb99ebeb5e773e35a445bbbfdbd45dd9b9df169eeca80880a53b")),
bytes32(bytes.fromhex("5d76a4bcb3524d862e92317410583daf50828927885444c6d62ca8843635c46f")),
)
farming_info = farmer_protocol.FarmingInfo(
bytes32(bytes.fromhex("345cefad6a04d3ea4fec4b31e56000de622de9fe861afa53424138dd45307fc2")),
bytes32(bytes.fromhex("1105c288abb976e95804796aea5bb6f66a6b500c0f538d4e71f0d701cad9ff11")),
uint64(16359391077414942762),
uint32(1390832181),
uint32(908923578),
uint32(2259819406),
)
signed_values = farmer_protocol.SignedValues(
bytes32(bytes.fromhex("915de5949724e1fc92d334e589c26ddbcd67415cbbdbbfc5e6de93b3b33bb267")),
G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
),
G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
),
)
### FULL NODE PROTOCOL.
new_peak = full_node_protocol.NewPeak(
bytes32(bytes.fromhex("8a346e8dc02e9b44c0571caa74fd99f163d4c5d7deae9f8ddb00528721493f7a")),
uint32(2653549198),
uint128(196318552117141200341240034145143439804),
uint32(928039765),
bytes32(bytes.fromhex("dd421c55d4edaeeb3ad60e80d73c2005a1b275c381c7e418915200d7467711b5")),
)
new_transaction = full_node_protocol.NewTransaction(
bytes32(bytes.fromhex("e4fe833328d4e82f9c57bc1fc2082c9b63da23e46927522cb5a073f9f0979b6a")),
uint64(13950654730705425115),
uint64(10674036971945712700),
)
request_transaction = full_node_protocol.RequestTransaction(
bytes32(bytes.fromhex("3dc310a07be53bfd701e4a0d77ce39836eeab4717fe25b1ae4c3f16aad0e5d83")),
)
coin_1 = Coin(
bytes32(bytes.fromhex("d56f435d3382cb9aa5f50f51816e4c54487c66402339901450f3c810f1d77098")),
bytes32(bytes.fromhex("9944f63fcc251719b2f04c47ab976a167f96510736dc6fdfa8e037d740f4b5f3")),
uint64(6602327684212801382),
)
serialized_program_1 = SerializedProgram.from_bytes(
bytes.fromhex(
"ff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080"
)
)
serialized_program_2 = SerializedProgram.from_bytes(
bytes.fromhex(
"ffff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080ff8080"
)
)
coin_spend = CoinSpend(
coin_1,
serialized_program_1,
serialized_program_2,
)
coin_spends = [coin_spend]
spend_bundle = SpendBundle(
coin_spends,
G2Element(
bytes.fromhex(
"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
)
),
)
respond_transaction = full_node_protocol.RespondTransaction(spend_bundle)
request_proof_of_weight = full_node_protocol.RequestProofOfWeight(
uint32(1109907246),
bytes32(bytes.fromhex("1fa3bfc747762c6edbe9937630e50b6982c3cf4fd67931f2ffcececb8c509839")),
)
sub_epochs = SubEpochData(
bytes32(bytes.fromhex("6fdcfaabeb149f9c44c80c230c44771e14b3d4e1b361dcca9c823b7ea7887ffe")),
uint8(190),
uint64(10527522631566046685),
uint64(989988965238543242),
)
vdf_info = VDFInfo(
bytes32(bytes.fromhex("7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972")),
uint64(14708638287767651172),
ClassgroupElement.get_default_element(),
)
vdf_proof = VDFProof(
uint8(197),
bytes(b"0" * 100),
False,
)
sub_slot_data = SubSlotData(
proof_of_space,
vdf_proof,
vdf_proof,
vdf_proof,
vdf_info,
uint8(255),
vdf_proof,
vdf_proof,
vdf_info,
vdf_info,
vdf_info,
vdf_info,
uint128(178067533887691737655963933428342640848),
)
sub_epoch_challenge_segments = SubEpochChallengeSegment(
uint32(3946877794),
[sub_slot_data],
vdf_info,
)
challenge_chain = ChallengeChainSubSlot(
vdf_info,
bytes32(bytes.fromhex("42c10d66108589c11bb3811b37d214b6351b73e25bad6c956c0bf1c05a4d93fb")),
bytes32(bytes.fromhex("cdb6d334b461a01c4d07c76dd71d5a9f3a2949807a3499eb484e4b91e6cea309")),
uint64(42556034269004566),
uint64(16610212302933121129),
)
infused_challenge_chain = InfusedChallengeChainSubSlot(
vdf_info,
)
reward_chain = RewardChainSubSlot(
vdf_info,
bytes32(bytes.fromhex("893f282b27c4961f47d886577a8d7c136d1e738e6c5badd37c1994e68871cb70")),
bytes32(bytes.fromhex("4be4cc2a1f15c5c69fb9becac0cbe0df5ea007a94f22bca79f88e14fc2a46def")),
uint8(52),
)
proofs = SubSlotProofs(
vdf_proof,
vdf_proof,
vdf_proof,
)
reward_chain_block = RewardChainBlock(
uint128(187084448821891925757676377381787790114),
uint32(301889038),
uint128(147405131564197136044258885592706844266),
uint8(9),
bytes32(bytes.fromhex("50102505a28e3969db19c699a5e53af73c1cb3108e2ab9ce9d86d1f058b10457")),
proof_of_space,
vdf_info,
g2_element,
vdf_info,
vdf_info,
g2_element,
vdf_info,
vdf_info,
False,
)
foliage_block_data = FoliageBlockData(
bytes32(bytes.fromhex("205be4e4efff5b8d99b3f5c8d0ad19072875b9bac1ec3edda1f0df5467e2e61a")),
pool_target,
g2_element,
bytes32(bytes.fromhex("4e62d7ed145b394ce28533e4f0a7d70f339f9d4c49ee717e51e2d6480e5fcbcc")),
bytes32(bytes.fromhex("d53254dcdcbfddb431c3ff89d1a785491663b51552e3847d29e36972f43b536d")),
)
foliage = Foliage(
bytes32(bytes.fromhex("312fd3fe7c9a21cd90ce40b567730ab087fa29436bf8568adacc605f52912fba")),
bytes32(bytes.fromhex("ba37d30b755680e0b8873a1b7f0ae7636400999ca2b2d32ad0aebb0c24e258aa")),
foliage_block_data,
g2_element,
bytes32(bytes.fromhex("ac6a47ca76efeac93b1c435dfa2e876ab63c0a62fa7aa5a6b8cf9efd95084025")),
g2_element,
)
foliage_transaction_block = FoliageTransactionBlock(
bytes32(bytes.fromhex("852ed117f46fa98af7a17fcb050c369245a30fcffc190177c3a316109d1609c7")),
uint64(3871668531533889186),
bytes32(bytes.fromhex("ffab724c5df9b90c0842565225f5ed842da14f159373c05d63643405ccce84b3")),
bytes32(bytes.fromhex("5f87a17fafb44afd0d6b5b67b77be38570b4bc0150388bd9c176d4ac5d4e693b")),
bytes32(bytes.fromhex("db967ce278f9bf4fdc77cb9fa82b5b2ce6876746eb5e61f4352a41e3abb63275")),
bytes32(bytes.fromhex("7eebe3b21505f7c7cb5536e96ab893bfa4626a5cf9c79fadb5dae6913e0a7cb3")),
)
end_of_subslot_bundle = EndOfSubSlotBundle(
challenge_chain,
infused_challenge_chain,
reward_chain,
proofs,
)
transactions_info = TransactionsInfo(
bytes32(bytes.fromhex("4cb791379aee03879628f69f16c0d3b78fd865c010c53c3b412dfa56e40f4d78")),
bytes32(bytes.fromhex("180c72ecd6e32986a354681fcf6924aa82c08cfb9df95667fa24442103cc2189")),
g2_element,
uint64(5840504611725889474),
uint64(7273736876528078474),
[
Coin(
bytes32(bytes.fromhex("dde12b149d44bafd07390d2ad6ce774ab50d083ada3f0bc3c0adebe6a6a1a4ab")),
bytes32(bytes.fromhex("503da231145145b114e85af933ed86a5834c08323743803ee31fca2b1c64ce15")),
uint64(8428133224333694484),
),
],
)
header_block = HeaderBlock(
[end_of_subslot_bundle],
reward_chain_block,
vdf_proof,
vdf_proof,
vdf_proof,
vdf_proof,
vdf_proof,
foliage,
foliage_transaction_block,
bytes([0] * 50),
transactions_info,
)
recent_chain_data = RecentChainData(
[header_block],
)
weight_proof = WeightProof(
[sub_epochs],
[sub_epoch_challenge_segments],
[header_block],
)
respond_proof_of_weight = full_node_protocol.RespondProofOfWeight(
weight_proof,
bytes32(bytes.fromhex("bf71d6f1ecae308aacf87db77aeba5a06f5d1099bfc7005529885e1f2dad857f")),
)
request_block = full_node_protocol.RequestBlock(
uint32(678860074),
False,
)
reject_block = full_node_protocol.RejectBlock(
uint32(966946253),
)
request_blocks = full_node_protocol.RequestBlocks(
uint32(2578479570),
uint32(3884442719),
False,
)
full_block = FullBlock(
[end_of_subslot_bundle],
reward_chain_block,
vdf_proof,
vdf_proof,
vdf_proof,
vdf_proof,
vdf_proof,
foliage,
foliage_transaction_block,
transactions_info,
SerializedProgram.from_bytes(
bytes.fromhex(
"ff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080"
)
),
[uint32(2456207540)],
)
respond_blocks = full_node_protocol.RespondBlocks(uint32(1000), uint32(4201431299), [full_block, full_block])
reject_blocks = full_node_protocol.RejectBlocks(
uint32(1160742782),
uint32(1856800720),
)
respond_block = full_node_protocol.RespondBlock(
full_block,
)
new_unfinished_block = full_node_protocol.NewUnfinishedBlock(
bytes32(bytes.fromhex("229646fb33551966039d9324c0d10166c554d20e9a11e3f30942ec0bb346377e")),
)
request_unfinished_block = full_node_protocol.RequestUnfinishedBlock(
bytes32(bytes.fromhex("8b5e5a59f33bb89e1bfd5aca79409352864e70aa7765c331d641875f83d59d1d")),
)
unfinished_block = UnfinishedBlock(
[end_of_subslot_bundle],
reward_chain_block.get_unfinished(),
vdf_proof,
vdf_proof,
foliage,
foliage_transaction_block,
transactions_info,
SerializedProgram.from_bytes(
bytes.fromhex(
"ff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080"
)
),
[uint32(1862532955)],
)
respond_unfinished_block = full_node_protocol.RespondUnfinishedBlock(unfinished_block)
new_signage_point_or_end_of_subslot = full_node_protocol.NewSignagePointOrEndOfSubSlot(
bytes32(bytes.fromhex("f945510ccea927f832635e56bc20315c92943e108d2b458ac91a290a82e02997")),
bytes32(bytes.fromhex("27a16b348971e5dfb258e7a01f0b300acbecf8339476afd144e8520f1981833b")),
uint8(102),
bytes32(bytes.fromhex("a619471c0ba0b8b8b92b7b2cb1241c2fbb2324c4f1a20a01eb7dcc0027393a56")),
)
request_signage_point_or_end_of_subslot = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
bytes32(bytes.fromhex("edd45b516bf1dc3754c30a99e289639e05f967dc1b590df8a377652bee4f463c")),
uint8(217),
bytes32(bytes.fromhex("b574062b42a5b3d76ea141d3b89a4a1096f7797bafe625770047380448622420")),
)
respond_signage_point = full_node_protocol.RespondSignagePoint(
uint8(111),
vdf_info,
vdf_proof,
vdf_info,
vdf_proof,
)
respond_end_of_subslot = full_node_protocol.RespondEndOfSubSlot(
end_of_subslot_bundle,
)
request_mempool_transaction = full_node_protocol.RequestMempoolTransactions(
bytes([0] * 32),
)
new_compact_vdf = full_node_protocol.NewCompactVDF(
uint32(1333973478),
bytes32(bytes.fromhex("e2188779d4a8e8fdf9cbe3103878b4c3f5f25a999fa8d04551c4ae01046c634e")),
uint8(169),
vdf_info,
)
request_compact_vdf = full_node_protocol.RequestCompactVDF(
uint32(3529778757),
bytes32(bytes.fromhex("1c02dfbf437c464cfd3f71d2da283c22bd04b2061e3c6b4bfd8b859092957d96")),
uint8(207),
vdf_info,
)
respond_compact_vdf = full_node_protocol.RespondCompactVDF(
uint32(2759248594),
bytes32(bytes.fromhex("51f2e23ac76179d69bc9232420f47e2a332b8c2495c24ceef7f730feb53c9117")),
uint8(167),
vdf_info,
vdf_proof,
)
request_peers = full_node_protocol.RequestPeers()
timestamped_peer_info = TimestampedPeerInfo("127.0.0.1", uint16(8444), uint64(10796))
respond_peers = full_node_protocol.RespondPeers([timestamped_peer_info])
## WALLET PROTOCOL
request_puzzle_solution = wallet_protocol.RequestPuzzleSolution(
bytes32(bytes.fromhex("6edddb46bd154f50566b49c95812e0f1131a0a7162630349fc8d1d696e463e47")),
uint32(3905474497),
)
program = Program.fromhex(
"ff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080"
)
puzzle_solution_response = wallet_protocol.PuzzleSolutionResponse(
bytes32(bytes.fromhex("45c4451fdeef92aa0706def2448adfaed8e4a1c0b08a6d303c57de661509c442")),
uint32(3776325015),
program,
program,
)
respond_puzzle_solution = wallet_protocol.RespondPuzzleSolution(
puzzle_solution_response,
)
reject_puzzle_solution = wallet_protocol.RejectPuzzleSolution(
bytes32(bytes.fromhex("2f16254e8e7a0b3fbe7bc709d29c5e7d2daa23ce1a2964e3f77b9413055029dd")),
uint32(2039721496),
)
send_transaction = wallet_protocol.SendTransaction(
spend_bundle,
)
transaction_ack = wallet_protocol.TransactionAck(
bytes32(bytes.fromhex("fc30d2df70f4ca0a138d5135d352611ddf268ea46c59cde48c29c43d9472532c")),
uint8(30),
"None",
)
new_peak_wallet = wallet_protocol.NewPeakWallet(
bytes32(bytes.fromhex("ee50e45652cb6a60e3ab0031aa425a6019648fe5344ae860e6fc14af1aa3c2fa")),
uint32(1093428752),
uint128(207496292293729126634170184354599452208),
uint32(133681371),
)
request_block_header = wallet_protocol.RequestBlockHeader(
uint32(3562957314),
)
request_block_headers = wallet_protocol.RequestBlockHeaders(
uint32(1234970524),
uint32(234653234),
False,
)
respond_header_block = wallet_protocol.RespondBlockHeader(
header_block,
)
respond_block_headers = wallet_protocol.RespondBlockHeaders(
uint32(923662371),
uint32(992357623),
[header_block],
)
reject_header_request = wallet_protocol.RejectHeaderRequest(
uint32(17867635),
)
request_removals = wallet_protocol.RequestRemovals(
uint32(3500751918),
bytes32(bytes.fromhex("b44bc0e0fce20331a57081107dfd30ef39fc436e6e6ce4f6f0ab8db4f981d114")),
[bytes32(bytes.fromhex("ab62cfb2abaf9e1a475b707c3d3de35d6ef4a298b31137802fd9ea47d48ff0d5"))],
)
respond_removals = wallet_protocol.RespondRemovals(
uint32(461268095),
bytes32(bytes.fromhex("e2db23a6484b05d9ae1033efe8dcfcf5894fc600a6b93b03782fab8dd1cba8a4")),
[(bytes32(bytes.fromhex("f800ab7a0d1598c473e31700b21a7cc590c1619f10e72a707d1c66f090e4e078")), coin_1)],
[(bytes32(bytes.fromhex("652c312e1dd9f32bf074e17ae8b658bf47711bd1a5e6c937adfb0c80b51fa49d")), bytes(b"a" * 10))],
)
reject_removals_request = wallet_protocol.RejectRemovalsRequest(
uint32(3247661701),
bytes32(bytes.fromhex("d5eee2d2ad56663c1c1d1cbde69329862dcf29010683aa7a0da91712d6876caf")),
)
request_additions = wallet_protocol.RequestAdditions(
uint32(2566479739),
bytes32(bytes.fromhex("17262e35437ddc95d43431d20657c096cff95f7ba93a39367f56f1f9df0f0277")),
[bytes32(bytes.fromhex("6fc7b72bc37f462dc820d4b39c9e69e9e65b590ee1a6b0a06b5105d048c278d4"))],
)
respond_additions = wallet_protocol.RespondAdditions(
uint32(1992350400),
bytes32(bytes.fromhex("449ba349ce403c1acfcd46108758e7ada3a455e7a82dbee90860ec73adb090c9")),
[(bytes32(bytes.fromhex("ed8daaf9233ed82e773ef4d1e89f2958fec0570137cf2c267ae22099ab43a9a4")), [coin_1, coin_1])],
[
(
bytes32(bytes.fromhex("8bb1381ff8ee01944d6d6c7e2df4b2fc84343a0c6c0fb93e8ef6d75e5c8b3048")),
bytes(b"a" * 10),
bytes(b"a" * 10),
)
],
)
reject_additions = wallet_protocol.RejectAdditionsRequest(
uint32(3457211200),
bytes32(bytes.fromhex("4eb659e6dd727bc22191795692aae576922e56ae309871c352eede0c9dd8bb12")),
)
request_header_blocks = wallet_protocol.RequestHeaderBlocks(
uint32(2858301848),
uint32(720941539),
)
reject_header_blocks = wallet_protocol.RejectHeaderBlocks(
uint32(876520264),
uint32(2908717391),
)
reject_block_headers = wallet_protocol.RejectBlockHeaders(
uint32(543373229),
uint32(2347869036),
)
respond_header_blocks = wallet_protocol.RespondHeaderBlocks(
uint32(4130100992),
uint32(17664086),
[header_block],
)
coin_state = wallet_protocol.CoinState(
coin_1,
uint32(2287030048),
uint32(3361305811),
)
register_for_ph_updates = wallet_protocol.RegisterForPhUpdates(
[bytes32(bytes.fromhex("df24b7dc1d5ffa12f112e198cd26385b5ab302b5c2e5f9d589e5cd3f7b900510"))],
uint32(874269130),
)
respond_to_ph_updates = wallet_protocol.RespondToPhUpdates(
[bytes32(bytes.fromhex("1be3bdc54b84901554e4e843966cfa3be3380054c968bebc41cc6be4aa65322f"))],
uint32(3664709982),
[coin_state],
)
register_for_coin_updates = wallet_protocol.RegisterForCoinUpdates(
[bytes32(bytes.fromhex("1d7748531ece395e8bb8468b112d4ccdd1cea027359abd03c0b015edf666eec8"))],
uint32(3566185528),
)
respond_to_coin_updates = wallet_protocol.RespondToCoinUpdates(
[bytes32(bytes.fromhex("db8bad6bd9de34d4884380176135f31a655dca18e9a5fadfb567145b81b6a9e0"))],
uint32(3818814774),
[coin_state],
)
coin_state_update = wallet_protocol.CoinStateUpdate(
uint32(855344561),
uint32(1659753011),
bytes32(bytes.fromhex("8512cc80a2976c81186e8963bc7af9d6d5732ccae5227fffee823f0bf3081e76")),
[coin_state],
)
request_children = wallet_protocol.RequestChildren(
bytes32(bytes.fromhex("15beeed2e6dd0cf1b81a3f68a49845c020912218e4c1f002a1b3f43333495478")),
)
respond_children = wallet_protocol.RespondChildren(
[coin_state],
)
request_ses_info = wallet_protocol.RequestSESInfo(
uint32(2704205398),
uint32(2050258406),
)
respond_ses_info = wallet_protocol.RespondSESInfo(
[bytes32(bytes.fromhex("b61cb91773995e99cb8259609c0985f915a5734a1706aeab9342a2d1c5abf71b"))],
[[uint32(1), uint32(2), uint32(3)], [uint32(4), uint32(606340525)]],
)
### HARVESTER PROTOCOL
pool_difficulty = harvester_protocol.PoolDifficulty(
uint64(14819251421858580996),
uint64(12852879676624401630),
bytes32(bytes.fromhex("c9423123ea65e6923e973b95531b4874570dae942cb757a2daec4a6971753886")),
)
harvester_handhsake = harvester_protocol.HarvesterHandshake(
[
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
],
[
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
],
)
new_signage_point_harvester = harvester_protocol.NewSignagePointHarvester(
bytes32(bytes.fromhex("e342c21b4aeaa52349d42492be934692db58494ca9bce4a8697d06fdf8e583bb")),
uint64(15615706268399948682),
uint64(10520767421667792980),
uint8(148),
bytes32(bytes.fromhex("b78c9fca155e9742df835cbe84bb7e518bee70d78b6be6e39996c0a02e0cfe4c")),
[pool_difficulty],
)
new_proof_of_space = harvester_protocol.NewProofOfSpace(
bytes32(bytes.fromhex("1b64ec6bf3fe33bb80eca5b64ff1c88be07771eaed1e98a7199510522087e56e")),
bytes32(bytes.fromhex("ad1f8a74376ce8c5c93b7fbb355c2fb6d689ae4f4a7134166593d95265a3da30")),
"plot_1",
proof_of_space,
uint8(160),
)
request_signatures = harvester_protocol.RequestSignatures(
"plot_1",
bytes32(bytes.fromhex("b5fa873020fa8b959d89bc2ffc5797501bf870ac8b30437cd6b4fcdea0812789")),
bytes32(bytes.fromhex("bccb7744192771f3a7abca2bce6ea03ed53f1f0d991c13bd2711ce32a2fb3777")),
[bytes32(bytes.fromhex("3fc12545f50a9f0621371688f60b29eff05805dd51b42c90063f5e3c6698fc75"))],
)
respond_signatures = harvester_protocol.RespondSignatures(
"plot_1",
bytes32(bytes.fromhex("59468dce63b5b08490ec4eec4c461fc84b69b6f80a64f4c76b0d55780f7e7e7a")),
bytes32(bytes.fromhex("270b5fc00545db714077aba3b60245d769f492563f108a73b2b8502503d12b9e")),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
[(bytes32(bytes.fromhex("c32fd5310f5e8623697561930dca73cb9da5b3ddb903f52818724bb3bdd9349c")), g2_element)],
)
plot = harvester_protocol.Plot(
"plot_1",
uint8(124),
bytes32(bytes.fromhex("b2eb7e5c5239e8610a9dd0e137e185966ebb430faf31ae4a0e55d86251065b98")),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
bytes32(bytes.fromhex("1c96d26def7be696f12e7ebb91d50211e6217ce5d9087c9cd1b84782d5d4b237")),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
uint64(3368414292564311420),
uint64(2573238947935295522),
)
request_plots = harvester_protocol.RequestPlots()
respond_plots = harvester_protocol.RespondPlots(
[plot],
["str"],
["str"],
)
### INTRODUCER PROTOCOL
request_peers_introducer = introducer_protocol.RequestPeersIntroducer()
respond_peers_introducer = introducer_protocol.RespondPeersIntroducer(
[
TimestampedPeerInfo(
"127.0.0.1",
uint16(49878),
uint64(15079028934557257795),
)
]
)
### POOL PROTOCOL
authentication_payload = pool_protocol.AuthenticationPayload(
"method",
bytes32(bytes.fromhex("0251e3b3a1aacc689091b6b085be7a8d319bd9d1a015faae969cb76d8a45607c")),
bytes32(bytes.fromhex("9de241b508b5e9e2073b7645291cfaa9458d33935340399a861acf2ee1770440")),
uint64(4676522834655707230),
)
get_pool_info_response = pool_protocol.GetPoolInfoResponse(
"pool_name",
"pool_name",
uint64(7020711482626732214),
uint32(3407308703),
uint8(129),
"fee",
"pool description.",
bytes32(bytes.fromhex("f6b5120ff1ab7ba661e3b2c91c8b373a8aceea8e4eb6ce3f085f3e80a8655b36")),
uint8(76),
)
post_partial_payload = pool_protocol.PostPartialPayload(
bytes32(bytes.fromhex("dada61e179e67e5e8bc7aaab16e192facf0f15871f0c479d2a96ac5f85721a1a")),
uint64(2491521039628830788),
proof_of_space,
bytes32(bytes.fromhex("929287fab514e2204808821e2afe8c4d84f0093c75554b067fe4fca272890c9d")),
False,
bytes32(bytes.fromhex("f98dff6bdcc3926b33cb8ab22e11bd15c13d6a9b6832ac948b3273f5ccd8e7ec")),
)
post_partial_request = pool_protocol.PostPartialRequest(
post_partial_payload,
g2_element,
)
post_partial_response = pool_protocol.PostPartialResponse(
uint64(5956480724816802941),
)
get_farmer_response = pool_protocol.GetFarmerResponse(
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
"instructions",
uint64(8362834206591090467),
uint64(14310455844127802841),
)
post_farmer_payload = pool_protocol.PostFarmerPayload(
bytes32(bytes.fromhex("d3785b251b4e066f87784d06afc8e6ac8dac5a4922d994902c1bad60b5fa7ad3")),
uint64(5820795488800541986),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
"payout_instructions",
uint64(1996244065095983466),
)
post_farmer_request = pool_protocol.PostFarmerRequest(
post_farmer_payload,
g2_element,
)
post_farmer_response = pool_protocol.PostFarmerResponse(
"welcome",
)
put_farmer_payload = pool_protocol.PutFarmerPayload(
bytes32(bytes.fromhex("78aec4d523b0bea49829a1322d5de92a86a553ce8774690b8c8ad5fc1f7540a8")),
uint64(15049374353843709257),
G1Element(
bytes.fromhex(
"a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c"
),
),
"payload",
uint64(201241879360854600),
)
put_farmer_request = pool_protocol.PutFarmerRequest(
put_farmer_payload,
g2_element,
)
put_farmer_response = pool_protocol.PutFarmerResponse(
False,
False,
True,
)
error_response = pool_protocol.ErrorResponse(
uint16(47018),
"err",
)
### TIMELORD PROTOCOL
sub_epoch_summary = SubEpochSummary(
bytes32(bytes.fromhex("2d0550de416467e7b57e56e962c712b79bee29cae29c73cc908da5978fc9789e")),
bytes32(bytes.fromhex("3d29f5a3fe067ce7edea76c9cebaf3a3afdebc0eb9fbd530f807f1a28ed2df6d")),
uint8(4),
uint64(14666749803532899046),
uint64(10901191956946573440),
)
new_peak_timelord = timelord_protocol.NewPeakTimelord(
reward_chain_block,
uint64(7661623532867338566),
uint8(202),
uint64(16623089924886538940),
sub_epoch_summary,
[
(
bytes32(bytes.fromhex("5bb65d8662d561ed2fc17e4177ba61c43017ee7e5418091d38968e36ce380d11")),
uint128(134240022887890669757150210097251845335),
)
],
uint128(42058411995615810488183751196800190575),
True,
)
new_unfinished_block_timelord = timelord_protocol.NewUnfinishedBlockTimelord(
reward_chain_block.get_unfinished(),
uint64(601152037470280666),
uint64(14270340639924562415),
foliage,
sub_epoch_summary,
bytes32(bytes.fromhex("0f90296b605904a794e4e98852e3b22e0d9bee2fa07abb12df6cecbdb778e1e5")),
)
new_infusion_point_vdf = timelord_protocol.NewInfusionPointVDF(
bytes32(bytes.fromhex("3d3b977d3a3dab50f0cd72b74b2f08f5018fb5ef826a8773161b7a499dafa60f")),
vdf_info,
vdf_proof,
vdf_info,
vdf_proof,
vdf_info,
vdf_proof,
)
new_signage_point_vdf = timelord_protocol.NewSignagePointVDF(
uint8(182),
vdf_info,
vdf_proof,
vdf_info,
vdf_proof,
)
new_end_of_sub_slot_bundle = timelord_protocol.NewEndOfSubSlotVDF(
end_of_subslot_bundle,
)
request_compact_proof_of_time = timelord_protocol.RequestCompactProofOfTime(
vdf_info,
bytes32(bytes.fromhex("ad71f7e66dc12c4fd7dca7d0c7b4e1825dfd55b93dd590111d2c44bc4f4d66de")),
uint32(4134186845),
uint8(237),
)
respond_compact_proof_of_time = timelord_protocol.RespondCompactProofOfTime(
vdf_info,
vdf_proof,
bytes32(bytes.fromhex("071bef40d098cfadc2614d8b57db924788f7f2ea0fde8cf4bfaeae2894caa442")),
uint32(386395693),
uint8(224),
)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/db_connection.py | tests/util/db_connection.py | from pathlib import Path
from flax.util.db_wrapper import DBWrapper2
import tempfile
class DBConnection:
    """Async context manager that yields a fresh DBWrapper2 over a throwaway SQLite file.

    The database file is created on __aenter__ and deleted on __aexit__, so each
    test gets an isolated database.
    """

    def __init__(self, db_version: int) -> None:
        # Schema version forwarded to DBWrapper2.create on entry.
        self.db_version = db_version
    async def __aenter__(self) -> DBWrapper2:
        # NamedTemporaryFile is used only to reserve a unique filename; the object is
        # discarded immediately. NOTE(review): the file it creates may be removed as
        # soon as the object is garbage-collected, hence the explicit exists()/unlink()
        # guard below — confirm this is race-free enough for parallel test runs.
        self.db_path = Path(tempfile.NamedTemporaryFile().name)
        if self.db_path.exists():
            self.db_path.unlink()
        self._db_wrapper = await DBWrapper2.create(database=self.db_path, reader_count=4, db_version=self.db_version)
        return self._db_wrapper
    async def __aexit__(self, exc_t, exc_v, exc_tb) -> None:
        # Close the wrapper first, then remove the backing database file.
        await self._db_wrapper.close()
        self.db_path.unlink()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/misc.py | tests/util/misc.py | from __future__ import annotations
import contextlib
import dataclasses
import enum
import gc
import math
from concurrent.futures import Future
from inspect import getframeinfo, stack
from statistics import mean
from textwrap import dedent
from time import thread_time
from types import TracebackType
from typing import Callable, Iterator, List, Optional, Type, Union
import pytest
from typing_extensions import final
class GcMode(enum.Enum):
    """How the garbage collector is arranged while a measurement runs."""

    # NOTE: enum.auto must be *called*. The previous bare `enum.auto` assigned the
    # same object (the auto class itself) as every member's value, which collapsed
    # all four names into aliases of a single member and broke the mode comparisons
    # in manage_gc below.
    nothing = enum.auto()
    precollect = enum.auto()
    disable = enum.auto()
    enable = enum.auto()


@contextlib.contextmanager
def manage_gc(mode: GcMode) -> Iterator[None]:
    """Apply the requested garbage-collection regime around the managed block.

    nothing    - leave the collector alone
    precollect - force a collection up front, then leave the collector alone
    disable    - disable the collector for the duration, restoring its prior state
    enable     - enable the collector for the duration, restoring its prior state
    """
    # The `nothing` branch was previously missing entirely: a generator-based
    # context manager that never yields raises RuntimeError, so
    # manage_gc(GcMode.nothing) crashed instead of being a no-op.
    if mode == GcMode.nothing:
        yield
    elif mode == GcMode.precollect:
        gc.collect()
        yield
    elif mode == GcMode.disable:
        was_enabled = gc.isenabled()
        gc.disable()
        try:
            yield
        finally:
            # Restore only if the collector was enabled on entry.
            if was_enabled:
                gc.enable()
    elif mode == GcMode.enable:
        was_enabled = gc.isenabled()
        gc.enable()
        try:
            yield
        finally:
            if not was_enabled:
                gc.disable()
def caller_file_and_line(distance: int = 1) -> str:
    """Return "<filename>:<lineno>" for the frame *distance* steps above the caller."""
    # +1 skips this function's own frame.
    frame_info = getframeinfo(stack()[distance + 1][0])
    return ":".join([frame_info.filename, str(frame_info.lineno)])
@dataclasses.dataclass(frozen=True)
class RuntimeResults:
    """Raw timing data collected by measure_runtime for one managed block."""

    start: float
    end: float
    duration: float
    entry_line: str
    overhead: float
    def block(self, label: str = "") -> str:
        """Render a human-readable report for this measurement.

        The entry line is emitted at the start of its own line so that PyCharm
        renders it as a clickable link to the source location.
        """
        report_lines = [
            f"Measuring runtime: {label}",
            self.entry_line,
            f"    run time: {self.duration}",
            f"    overhead: {self.overhead}",
            "",
        ]
        return "\n".join(report_lines)
@final
@dataclasses.dataclass(frozen=True)
class AssertRuntimeResults:
    """Outcome of an assert_runtime measurement, including the limit comparison."""

    start: float
    end: float
    duration: float
    entry_line: str
    overhead: float
    limit: float
    ratio: float
    @classmethod
    def from_runtime_results(
        cls, results: RuntimeResults, limit: float, entry_line: str, overhead: float
    ) -> AssertRuntimeResults:
        # Derive the limit-relative fields (limit, ratio) from a plain RuntimeResults.
        return cls(
            start=results.start,
            end=results.end,
            duration=results.duration,
            limit=limit,
            ratio=results.duration / limit,
            entry_line=entry_line,
            overhead=overhead,
        )
    def block(self, label: str = "") -> str:
        """Render a multi-line human-readable report for this assertion."""
        # The entry line is reported starting at the beginning of the line to trigger
        # PyCharm to highlight as a link to the source.
        return dedent(
            f"""\
            Asserting maximum duration: {label}
            {self.entry_line}
                run time: {self.duration}
                overhead: {self.overhead}
                allowed: {self.limit}
                percent: {self.percent_str()}
            """
        )
    def message(self) -> str:
        # One-line summary used as the assertion-failure message.
        return f"{self.duration} seconds not less than {self.limit} seconds ( {self.percent_str()} )"
    def passed(self) -> bool:
        # True when the measured duration stayed under the limit.
        return self.duration < self.limit
    def percent(self) -> float:
        # Duration as a percentage of the allowed limit.
        return self.ratio * 100
    def percent_str(self) -> str:
        return f"{self.percent():.0f} %"
def measure_overhead(
    manager_maker: Callable[
        [], contextlib.AbstractContextManager[Union[Future[RuntimeResults], Future[AssertRuntimeResults]]]
    ],
    cycles: int = 10,
) -> float:
    """Estimate the bookkeeping overhead of a timing context manager.

    Runs *cycles* empty measurements through managers built by *manager_maker*
    and returns the mean observed duration.
    """
    samples: List[float] = []
    for _ in range(cycles):
        # Time an empty block; whatever duration is reported is pure overhead.
        with manager_maker() as results:
            pass
        samples.append(results.result(timeout=0).duration)
    return mean(samples)
@contextlib.contextmanager
def measure_runtime(
    label: str = "",
    clock: Callable[[], float] = thread_time,
    gc_mode: GcMode = GcMode.disable,
    calibrate: bool = True,
    print_results: bool = True,
) -> Iterator[Future[RuntimeResults]]:
    """Measure how long the managed block takes according to *clock*.

    Yields a Future that is fulfilled with RuntimeResults once the block exits.
    When *calibrate* is true, the manager first times several empty runs of
    itself and subtracts that mean overhead from the reported duration.
    """
    entry_line = caller_file_and_line()
    def manager_maker() -> contextlib.AbstractContextManager[Future[RuntimeResults]]:
        # Self-measurement used for calibration; calibration is disabled inside
        # to avoid unbounded recursion.
        return measure_runtime(clock=clock, gc_mode=gc_mode, calibrate=False, print_results=False)
    if calibrate:
        overhead = measure_overhead(manager_maker=manager_maker)
    else:
        overhead = 0
    results_future: Future[RuntimeResults] = Future()
    with manage_gc(mode=gc_mode):
        start = clock()
        try:
            yield results_future
        finally:
            # The finally clause guarantees a result is published even if the
            # managed block raises.
            end = clock()
            duration = end - start
            # Remove the previously measured bookkeeping overhead.
            duration -= overhead
            results = RuntimeResults(
                start=start,
                end=end,
                duration=duration,
                entry_line=entry_line,
                overhead=overhead,
            )
            results_future.set_result(results)
            if print_results:
                print(results.block(label=label))
@final
@dataclasses.dataclass
class _AssertRuntime:
    """Prepare for, measure, and assert about the time taken by code in the context.

    Defaults are set for single-threaded CPU usage timing without garbage collection.

    In general, there is no generally correct setup for benchmarking.  Only measuring
    a single thread's time using the CPU is not very useful for multithreaded or
    multiprocessed code.  Disabling garbage collection, or forcing it ahead of time,
    makes the benchmark not identify any issues the code may introduce in terms of
    actually causing relevant gc slowdowns.  And so on...

    Produces output of the following form.

        Asserting maximum duration: full block
        /home/altendky/repos/flax-blockchain/tests/core/full_node/test_performance.py:187
            run time: 0.027789528900002837
            allowed: 0.1
            percent: 28 %
    """
    # A class is only being used here, to make __tracebackhide__ work.
    # https://github.com/pytest-dev/pytest/issues/2057

    seconds: float
    label: str = ""
    clock: Callable[[], float] = thread_time
    gc_mode: GcMode = GcMode.disable
    calibrate: bool = True
    # NOTE: this field intentionally shadows the builtin name `print`.
    print: bool = True
    overhead: float = 0
    entry_line: Optional[str] = None
    _results: Optional[AssertRuntimeResults] = None
    runtime_manager: Optional[contextlib.AbstractContextManager[Future[RuntimeResults]]] = None
    runtime_results_callable: Optional[Future[RuntimeResults]] = None
    def __enter__(self) -> Future[AssertRuntimeResults]:
        # Record where the context was entered, for link-style reporting.
        self.entry_line = caller_file_and_line()
        if self.calibrate:
            def manager_maker() -> contextlib.AbstractContextManager[Future[AssertRuntimeResults]]:
                # Clone of this manager with an infinite limit, used purely to
                # estimate the measurement overhead.
                return dataclasses.replace(self, seconds=math.inf, calibrate=False, print=False)
            self.overhead = measure_overhead(manager_maker=manager_maker)
        self.runtime_manager = measure_runtime(
            clock=self.clock, gc_mode=self.gc_mode, calibrate=False, print_results=False
        )
        self.runtime_results_callable = self.runtime_manager.__enter__()
        self.results_callable: Future[AssertRuntimeResults] = Future()
        return self.results_callable
    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        if self.entry_line is None or self.runtime_manager is None or self.runtime_results_callable is None:
            raise Exception("Context manager must be entered before exiting")
        self.runtime_manager.__exit__(exc_type, exc, traceback)
        runtime = self.runtime_results_callable.result(timeout=0)
        results = AssertRuntimeResults.from_runtime_results(
            results=runtime,
            limit=self.seconds,
            entry_line=self.entry_line,
            overhead=self.overhead,
        )
        self.results_callable.set_result(results)
        if self.print:
            print(results.block(label=self.label))
        # Only assert on the duration when the block itself did not raise.
        if exc_type is None:
            # Hide this frame from pytest tracebacks so failures point at the caller.
            __tracebackhide__ = True
            assert runtime.duration < self.seconds, results.message()
# Related to the comment above about needing a class vs. using the context manager
# decorator, this is just here to retain the function-style naming as the public
# interface. Hopefully we can switch away from the class at some point.
assert_runtime = _AssertRuntime
@contextlib.contextmanager
def assert_rpc_error(error: str) -> Iterator[None]:
    """Assert that the managed block raises a ValueError whose RPC payload contains *error*."""
    with pytest.raises(ValueError) as exception_info:
        yield
    # RPC failures surface as ValueError carrying a {"error": ...} mapping in args[0].
    assert error in exception_info.value.args[0]["error"]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_lock_queue.py | tests/util/test_lock_queue.py | import asyncio
import logging
import time
from asyncio import CancelledError
import pytest
from flax.full_node.lock_queue import LockQueue, LockClient
log = logging.getLogger(__name__)
class TestLockQueue:
    """Exercises LockQueue priority ordering under heavy contention."""

    @pytest.mark.asyncio
    async def test_lock_queue(self):
        """One high-priority worker must finish before 50 low-priority workers."""
        lock = asyncio.Lock()
        queue = LockQueue(lock)
        # NOTE(review): priority 0 is treated below as higher than 1 — confirm
        # against LockClient's ordering semantics.
        low_priority_client = LockClient(1, queue)
        high_priority_client = LockClient(0, queue)
        async def very_slow_func():
            # Currently unused by this test.
            await asyncio.sleep(2)
            raise CancelledError()
        async def slow_func():
            # ~1s of cooperative work while holding the lock.
            for i in range(100):
                await asyncio.sleep(0.01)
        async def kind_of_slow_func():
            # ~0.1s of cooperative work while holding the lock.
            for i in range(100):
                await asyncio.sleep(0.001)
        async def do_high():
            nonlocal high_priority_client
            for i in range(10):
                log.warning("Starting high")
                t1 = time.time()
                async with high_priority_client:
                    log.warning(f"Spend {time.time() - t1} waiting for high")
                    await slow_func()
        async def do_low(i: int):
            nonlocal low_priority_client
            log.warning(f"Starting low {i}")
            t1 = time.time()
            async with low_priority_client:
                log.warning(f"Spend {time.time() - t1} waiting for low {i}")
                await kind_of_slow_func()
        h = asyncio.create_task(do_high())
        l_tasks = []
        for i in range(50):
            l_tasks.append(asyncio.create_task(do_low(i)))
        # Poll until both sides complete, recording which side finished first.
        winner = None
        while True:
            if h.done():
                if winner is None:
                    winner = "h"
            l_finished = True
            for t in l_tasks:
                if not t.done():
                    l_finished = False
            if l_finished and winner is None:
                winner = "l"
            if l_finished and h.done():
                break
            await asyncio.sleep(1)
        # The high-priority worker must have completed before all low-priority tasks.
        assert winner == "h"
        queue.close()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_chunks.py | tests/util/test_chunks.py | from __future__ import annotations
from flax.util.chunks import chunks
def test_chunks() -> None:
    """chunks() splits a list into size-n pieces; the last piece may be shorter."""
    assert list(chunks([], 0)) == []
    assert list(chunks(["a"], 0)) == [["a"]]
    assert list(chunks(["a", "b"], 0)) == [["a"], ["b"]]
    # Non-positive chunk sizes behave like size 1.
    assert list(chunks(["a", "b", "c", "d"], -1)) == [["a"], ["b"], ["c"], ["d"]]
    assert list(chunks(["a", "b", "c", "d"], 0)) == [["a"], ["b"], ["c"], ["d"]]
    assert list(chunks(["a", "b", "c", "d"], 1)) == [["a"], ["b"], ["c"], ["d"]]
    assert list(chunks(["a", "b", "c", "d"], 2)) == [["a", "b"], ["c", "d"]]
    assert list(chunks(["a", "b", "c", "d"], 3)) == [["a", "b", "c"], ["d"]]
    # A chunk size >= len(xs) yields the whole list as a single chunk.
    assert list(chunks(["a", "b", "c", "d"], 4)) == [["a", "b", "c", "d"]]
    assert list(chunks(["a", "b", "c", "d"], 200)) == [["a", "b", "c", "d"]]
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/wallet_is_synced.py | tests/util/wallet_is_synced.py | from __future__ import annotations
from typing import List
from flax.full_node.full_node_api import FullNodeAPI
from flax.wallet.wallet_node import WalletNode
async def wallet_is_synced(wallet_node: WalletNode, full_node_api: FullNodeAPI) -> bool:
    """Return True when the wallet has synced to the full node's peak with no queued work."""
    wallet_height = await wallet_node.wallet_state_manager.blockchain.get_finished_sync_up_to()
    full_node_height = full_node_api.full_node.blockchain.get_peak_height()
    has_pending_queue_items = wallet_node.new_peak_queue.has_pending_data_process_items()
    return wallet_height == full_node_height and not has_pending_queue_items
async def wallets_are_synced(wns: List[WalletNode], full_node_api: FullNodeAPI) -> bool:
    """Return True when every wallet in *wns* is synced to *full_node_api*."""
    # A list comprehension (not a generator) is required here: all() cannot
    # consume the async generator that `await ... for` would otherwise produce.
    return all([await wallet_is_synced(wn, full_node_api) for wn in wns])
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/config.py | tests/util/config.py | from __future__ import annotations
# Test-runner knobs for this directory, read by the project's CI test splitter.
parallel = True  # presumably enables parallel execution of these tests — confirm against the runner
job_timeout = 60  # per-job timeout — presumably seconds; confirm against the runner
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/key_tool.py | tests/util/key_tool.py | from typing import List
from blspy import AugSchemeMPL, G2Element, PrivateKey
from flax.types.coin_spend import CoinSpend
from flax.util.condition_tools import conditions_by_opcode, conditions_for_solution, pkm_pairs_for_conditions_dict
from tests.core.make_block_generator import GROUP_ORDER, int_to_public_key
from flax.simulator.block_tools import test_constants
class KeyTool(dict):
    """Test helper mapping serialized BLS public keys to secret exponents, used for signing."""

    @classmethod
    def __new__(cls, *args):
        # NOTE(review): because __new__ is (unusually) wrapped in @classmethod,
        # the bound call receives the class through *args, so dict.__new__(*args)
        # ends up as dict.__new__(KeyTool). It works, but only accidentally —
        # confirm before relying on subclassing this.
        return dict.__new__(*args)
    def add_secret_exponents(self, secret_exponents: List[int]) -> None:
        # Index each exponent (reduced mod the group order) by its public key bytes.
        for _ in secret_exponents:
            self[bytes(int_to_public_key(_))] = _ % GROUP_ORDER
    def sign(self, public_key: bytes, message: bytes) -> G2Element:
        """Sign *message* with the secret exponent registered for *public_key*."""
        secret_exponent = self.get(public_key)
        if not secret_exponent:
            raise ValueError("unknown pubkey %s" % public_key.hex())
        bls_private_key = PrivateKey.from_bytes(secret_exponent.to_bytes(32, "big"))
        return AugSchemeMPL.sign(bls_private_key, message)
    def signature_for_solution(self, coin_spend: CoinSpend, additional_data: bytes) -> G2Element:
        """Aggregate a signature for every AGG_SIG condition produced by the spend's solution.

        Return annotation corrected from AugSchemeMPL (the scheme class) to
        G2Element: AugSchemeMPL.aggregate returns a signature element.
        """
        signatures = []
        err, conditions, cost = conditions_for_solution(
            coin_spend.puzzle_reveal, coin_spend.solution, test_constants.MAX_BLOCK_COST_CLVM
        )
        assert conditions is not None
        conditions_dict = conditions_by_opcode(conditions)
        for public_key, message in pkm_pairs_for_conditions_dict(
            conditions_dict, coin_spend.coin.name(), additional_data
        ):
            signature = self.sign(public_key, message)
            signatures.append(signature)
        return AugSchemeMPL.aggregate(signatures)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/__init__.py | tests/util/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_network.py | tests/util/test_network.py | import os
import sys
import pytest
from flax.util.network import get_host_addr
class TestNetwork:
    """Tests for the get_host_addr DNS-resolution helper, for IPv4 and IPv6."""

    @pytest.mark.asyncio
    async def test_get_host_addr4(self):
        # Run these tests forcing IPv4 resolution
        prefer_ipv6 = False
        assert get_host_addr("127.0.0.1", prefer_ipv6) == "127.0.0.1"
        assert get_host_addr("10.11.12.13", prefer_ipv6) == "10.11.12.13"
        assert get_host_addr("localhost", prefer_ipv6) == "127.0.0.1"
        # NOTE(review): hard-codes example.net's public A record — brittle if the
        # IANA-operated address ever changes; confirm before relying on it in CI.
        assert get_host_addr("example.net", prefer_ipv6) == "93.184.216.34"
    @pytest.mark.asyncio
    @pytest.mark.skipif(
        condition=("GITHUB_ACTIONS" in os.environ) and (sys.platform in {"darwin", "win32"}),
        reason="macOS and Windows runners in GitHub Actions do not seem to support IPv6",
    )
    async def test_get_host_addr6(self):
        # Run these tests forcing IPv6 resolution
        prefer_ipv6 = True
        assert get_host_addr("::1", prefer_ipv6) == "::1"
        assert get_host_addr("2000:1000::1234:abcd", prefer_ipv6) == "2000:1000::1234:abcd"
        # ip6-localhost is not always available, and localhost is IPv4 only
        # on some systems. Just test neither here.
        # assert get_host_addr("ip6-localhost", prefer_ipv6) == "::1"
        # assert get_host_addr("localhost", prefer_ipv6) == "::1"
        # NOTE(review): hard-codes example.net's AAAA record — same brittleness caveat.
        assert get_host_addr("example.net", prefer_ipv6) == "2606:2800:220:1:248:1893:25c8:1946"
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_misc.py | tests/util/test_misc.py | import pytest
from flax.util.errors import InvalidPathError
from flax.util.misc import format_bytes, validate_directory_writable
from flax.util.misc import format_minutes
class TestMisc:
    """Unit tests for the human-readable byte-size and minute formatters."""

    @pytest.mark.asyncio
    async def test_format_bytes(self):
        """format_bytes renders MiB-and-up binary units and rejects non-int / negative input."""
        assert format_bytes(None) == "Invalid"
        assert format_bytes(dict()) == "Invalid"
        assert format_bytes("some bytes") == "Invalid"
        assert format_bytes(-1024) == "Invalid"
        assert format_bytes(0) == "0.000 MiB"
        assert format_bytes(1024) == "0.001 MiB"
        assert format_bytes(1024**2 - 1000) == "0.999 MiB"
        assert format_bytes(1024**2) == "1.000 MiB"
        assert format_bytes(1024**3) == "1.000 GiB"
        assert format_bytes(1024**4) == "1.000 TiB"
        assert format_bytes(1024**5) == "1.000 PiB"
        assert format_bytes(1024**6) == "1.000 EiB"
        assert format_bytes(1024**7) == "1.000 ZiB"
        assert format_bytes(1024**8) == "1.000 YiB"
        # YiB is the largest supported unit — beyond it the count simply grows.
        assert format_bytes(1024**9) == "1024.000 YiB"
        assert format_bytes(1024**10) == "1048576.000 YiB"
        assert format_bytes(1024**20).endswith("YiB")
    @pytest.mark.asyncio
    async def test_format_minutes(self):
        """format_minutes renders minutes up through years, with pluralization and two-unit output."""
        assert format_minutes(None) == "Invalid"
        assert format_minutes(dict()) == "Invalid"
        assert format_minutes("some minutes") == "Invalid"
        assert format_minutes(-1) == "Unknown"
        assert format_minutes(0) == "Now"
        assert format_minutes(1) == "1 minute"
        assert format_minutes(59) == "59 minutes"
        assert format_minutes(60) == "1 hour"
        assert format_minutes(61) == "1 hour and 1 minute"
        assert format_minutes(119) == "1 hour and 59 minutes"
        assert format_minutes(1380) == "23 hours"
        assert format_minutes(1440) == "1 day"
        assert format_minutes(2160) == "1 day and 12 hours"
        assert format_minutes(8640) == "6 days"
        assert format_minutes(10080) == "1 week"
        assert format_minutes(20160) == "2 weeks"
        assert format_minutes(40240) == "3 weeks and 6 days"
        assert format_minutes(40340) == "4 weeks"
        assert format_minutes(43800) == "1 month"
        assert format_minutes(102000) == "2 months and 1 week"
        assert format_minutes(481800) == "11 months"
        assert format_minutes(525600) == "1 year"
        assert format_minutes(1007400) == "1 year and 11 months"
        assert format_minutes(5256000) == "10 years"
def test_validate_directory_writable(tmp_path) -> None:
    """validate_directory_writable probes with a `.write_test` file and raises InvalidPathError on failure."""
    write_test_path = tmp_path / ".write_test"  # `.write_test` is used in validate_directory_writable
    validate_directory_writable(tmp_path)
    # The probe file must be cleaned up after a successful check.
    assert not write_test_path.exists()
    subdir = tmp_path / "subdir"
    with pytest.raises(InvalidPathError, match="Directory doesn't exist") as exc_info:
        validate_directory_writable(subdir)
    assert exc_info.value.path == subdir
    assert not write_test_path.exists()
    # Occupying the probe name with a directory makes the write attempt fail,
    # which should surface as "Directory not writable".
    (tmp_path / ".write_test").mkdir()
    with pytest.raises(InvalidPathError, match="Directory not writable") as exc_info:
        validate_directory_writable(tmp_path)
    assert exc_info.value.path == tmp_path
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_network_protocol_json.py | tests/util/test_network_protocol_json.py | # this file is generated by build_network_protocol_files.py
from __future__ import annotations
from tests.util.network_protocol_data import * # noqa: F403
from tests.util.protocol_messages_json import * # noqa: F403
def test_protocol_json() -> None:
    """Round-trip every protocol-message fixture through its JSON form.

    For each message fixture from tests.util.network_protocol_data there is a
    matching ``*_json`` dict in tests.util.protocol_messages_json.  Both
    directions are checked for every message:

    - ``to_json_dict()`` on the message matches the expected JSON fixture, and
    - ``from_json_dict()`` on the fixture reproduces the original message.

    NOTE: this file is generated by build_network_protocol_files.py — change
    the generator, not this function.
    """
    assert str(new_signage_point_json) == str(new_signage_point.to_json_dict())
    assert type(new_signage_point).from_json_dict(new_signage_point_json) == new_signage_point
    assert str(declare_proof_of_space_json) == str(declare_proof_of_space.to_json_dict())
    assert type(declare_proof_of_space).from_json_dict(declare_proof_of_space_json) == declare_proof_of_space
    assert str(request_signed_values_json) == str(request_signed_values.to_json_dict())
    assert type(request_signed_values).from_json_dict(request_signed_values_json) == request_signed_values
    assert str(farming_info_json) == str(farming_info.to_json_dict())
    assert type(farming_info).from_json_dict(farming_info_json) == farming_info
    assert str(signed_values_json) == str(signed_values.to_json_dict())
    assert type(signed_values).from_json_dict(signed_values_json) == signed_values
    assert str(new_peak_json) == str(new_peak.to_json_dict())
    assert type(new_peak).from_json_dict(new_peak_json) == new_peak
    assert str(new_transaction_json) == str(new_transaction.to_json_dict())
    assert type(new_transaction).from_json_dict(new_transaction_json) == new_transaction
    assert str(request_transaction_json) == str(request_transaction.to_json_dict())
    assert type(request_transaction).from_json_dict(request_transaction_json) == request_transaction
    assert str(respond_transaction_json) == str(respond_transaction.to_json_dict())
    assert type(respond_transaction).from_json_dict(respond_transaction_json) == respond_transaction
    assert str(request_proof_of_weight_json) == str(request_proof_of_weight.to_json_dict())
    assert type(request_proof_of_weight).from_json_dict(request_proof_of_weight_json) == request_proof_of_weight
    assert str(respond_proof_of_weight_json) == str(respond_proof_of_weight.to_json_dict())
    assert type(respond_proof_of_weight).from_json_dict(respond_proof_of_weight_json) == respond_proof_of_weight
    assert str(request_block_json) == str(request_block.to_json_dict())
    assert type(request_block).from_json_dict(request_block_json) == request_block
    assert str(reject_block_json) == str(reject_block.to_json_dict())
    assert type(reject_block).from_json_dict(reject_block_json) == reject_block
    assert str(request_blocks_json) == str(request_blocks.to_json_dict())
    assert type(request_blocks).from_json_dict(request_blocks_json) == request_blocks
    assert str(respond_blocks_json) == str(respond_blocks.to_json_dict())
    assert type(respond_blocks).from_json_dict(respond_blocks_json) == respond_blocks
    assert str(reject_blocks_json) == str(reject_blocks.to_json_dict())
    assert type(reject_blocks).from_json_dict(reject_blocks_json) == reject_blocks
    assert str(respond_block_json) == str(respond_block.to_json_dict())
    assert type(respond_block).from_json_dict(respond_block_json) == respond_block
    assert str(new_unfinished_block_json) == str(new_unfinished_block.to_json_dict())
    assert type(new_unfinished_block).from_json_dict(new_unfinished_block_json) == new_unfinished_block
    assert str(request_unfinished_block_json) == str(request_unfinished_block.to_json_dict())
    assert type(request_unfinished_block).from_json_dict(request_unfinished_block_json) == request_unfinished_block
    assert str(respond_unfinished_block_json) == str(respond_unfinished_block.to_json_dict())
    assert type(respond_unfinished_block).from_json_dict(respond_unfinished_block_json) == respond_unfinished_block
    assert str(new_signage_point_or_end_of_subslot_json) == str(new_signage_point_or_end_of_subslot.to_json_dict())
    assert (
        type(new_signage_point_or_end_of_subslot).from_json_dict(new_signage_point_or_end_of_subslot_json)
        == new_signage_point_or_end_of_subslot
    )
    assert str(request_signage_point_or_end_of_subslot_json) == str(
        request_signage_point_or_end_of_subslot.to_json_dict()
    )
    assert (
        type(request_signage_point_or_end_of_subslot).from_json_dict(request_signage_point_or_end_of_subslot_json)
        == request_signage_point_or_end_of_subslot
    )
    assert str(respond_signage_point_json) == str(respond_signage_point.to_json_dict())
    assert type(respond_signage_point).from_json_dict(respond_signage_point_json) == respond_signage_point
    assert str(respond_end_of_subslot_json) == str(respond_end_of_subslot.to_json_dict())
    assert type(respond_end_of_subslot).from_json_dict(respond_end_of_subslot_json) == respond_end_of_subslot
    assert str(request_mempool_transaction_json) == str(request_mempool_transaction.to_json_dict())
    assert (
        type(request_mempool_transaction).from_json_dict(request_mempool_transaction_json)
        == request_mempool_transaction
    )
    assert str(new_compact_vdf_json) == str(new_compact_vdf.to_json_dict())
    assert type(new_compact_vdf).from_json_dict(new_compact_vdf_json) == new_compact_vdf
    assert str(request_compact_vdf_json) == str(request_compact_vdf.to_json_dict())
    assert type(request_compact_vdf).from_json_dict(request_compact_vdf_json) == request_compact_vdf
    assert str(respond_compact_vdf_json) == str(respond_compact_vdf.to_json_dict())
    assert type(respond_compact_vdf).from_json_dict(respond_compact_vdf_json) == respond_compact_vdf
    assert str(request_peers_json) == str(request_peers.to_json_dict())
    assert type(request_peers).from_json_dict(request_peers_json) == request_peers
    assert str(respond_peers_json) == str(respond_peers.to_json_dict())
    assert type(respond_peers).from_json_dict(respond_peers_json) == respond_peers
    assert str(request_puzzle_solution_json) == str(request_puzzle_solution.to_json_dict())
    assert type(request_puzzle_solution).from_json_dict(request_puzzle_solution_json) == request_puzzle_solution
    assert str(puzzle_solution_response_json) == str(puzzle_solution_response.to_json_dict())
    assert type(puzzle_solution_response).from_json_dict(puzzle_solution_response_json) == puzzle_solution_response
    assert str(respond_puzzle_solution_json) == str(respond_puzzle_solution.to_json_dict())
    assert type(respond_puzzle_solution).from_json_dict(respond_puzzle_solution_json) == respond_puzzle_solution
    assert str(reject_puzzle_solution_json) == str(reject_puzzle_solution.to_json_dict())
    assert type(reject_puzzle_solution).from_json_dict(reject_puzzle_solution_json) == reject_puzzle_solution
    assert str(send_transaction_json) == str(send_transaction.to_json_dict())
    assert type(send_transaction).from_json_dict(send_transaction_json) == send_transaction
    assert str(transaction_ack_json) == str(transaction_ack.to_json_dict())
    assert type(transaction_ack).from_json_dict(transaction_ack_json) == transaction_ack
    assert str(new_peak_wallet_json) == str(new_peak_wallet.to_json_dict())
    assert type(new_peak_wallet).from_json_dict(new_peak_wallet_json) == new_peak_wallet
    assert str(request_block_header_json) == str(request_block_header.to_json_dict())
    assert type(request_block_header).from_json_dict(request_block_header_json) == request_block_header
    assert str(request_block_headers_json) == str(request_block_headers.to_json_dict())
    assert type(request_block_headers).from_json_dict(request_block_headers_json) == request_block_headers
    assert str(respond_header_block_json) == str(respond_header_block.to_json_dict())
    assert type(respond_header_block).from_json_dict(respond_header_block_json) == respond_header_block
    assert str(respond_block_headers_json) == str(respond_block_headers.to_json_dict())
    assert type(respond_block_headers).from_json_dict(respond_block_headers_json) == respond_block_headers
    assert str(reject_header_request_json) == str(reject_header_request.to_json_dict())
    assert type(reject_header_request).from_json_dict(reject_header_request_json) == reject_header_request
    assert str(request_removals_json) == str(request_removals.to_json_dict())
    assert type(request_removals).from_json_dict(request_removals_json) == request_removals
    assert str(respond_removals_json) == str(respond_removals.to_json_dict())
    assert type(respond_removals).from_json_dict(respond_removals_json) == respond_removals
    assert str(reject_removals_request_json) == str(reject_removals_request.to_json_dict())
    assert type(reject_removals_request).from_json_dict(reject_removals_request_json) == reject_removals_request
    assert str(request_additions_json) == str(request_additions.to_json_dict())
    assert type(request_additions).from_json_dict(request_additions_json) == request_additions
    assert str(respond_additions_json) == str(respond_additions.to_json_dict())
    assert type(respond_additions).from_json_dict(respond_additions_json) == respond_additions
    assert str(reject_additions_json) == str(reject_additions.to_json_dict())
    assert type(reject_additions).from_json_dict(reject_additions_json) == reject_additions
    assert str(request_header_blocks_json) == str(request_header_blocks.to_json_dict())
    assert type(request_header_blocks).from_json_dict(request_header_blocks_json) == request_header_blocks
    assert str(reject_header_blocks_json) == str(reject_header_blocks.to_json_dict())
    assert type(reject_header_blocks).from_json_dict(reject_header_blocks_json) == reject_header_blocks
    assert str(respond_header_blocks_json) == str(respond_header_blocks.to_json_dict())
    assert type(respond_header_blocks).from_json_dict(respond_header_blocks_json) == respond_header_blocks
    assert str(coin_state_json) == str(coin_state.to_json_dict())
    assert type(coin_state).from_json_dict(coin_state_json) == coin_state
    assert str(register_for_ph_updates_json) == str(register_for_ph_updates.to_json_dict())
    assert type(register_for_ph_updates).from_json_dict(register_for_ph_updates_json) == register_for_ph_updates
    assert str(reject_block_headers_json) == str(reject_block_headers.to_json_dict())
    assert type(reject_block_headers).from_json_dict(reject_block_headers_json) == reject_block_headers
    assert str(respond_to_ph_updates_json) == str(respond_to_ph_updates.to_json_dict())
    assert type(respond_to_ph_updates).from_json_dict(respond_to_ph_updates_json) == respond_to_ph_updates
    assert str(register_for_coin_updates_json) == str(register_for_coin_updates.to_json_dict())
    assert type(register_for_coin_updates).from_json_dict(register_for_coin_updates_json) == register_for_coin_updates
    assert str(respond_to_coin_updates_json) == str(respond_to_coin_updates.to_json_dict())
    assert type(respond_to_coin_updates).from_json_dict(respond_to_coin_updates_json) == respond_to_coin_updates
    assert str(coin_state_update_json) == str(coin_state_update.to_json_dict())
    assert type(coin_state_update).from_json_dict(coin_state_update_json) == coin_state_update
    assert str(request_children_json) == str(request_children.to_json_dict())
    assert type(request_children).from_json_dict(request_children_json) == request_children
    assert str(respond_children_json) == str(respond_children.to_json_dict())
    assert type(respond_children).from_json_dict(respond_children_json) == respond_children
    assert str(request_ses_info_json) == str(request_ses_info.to_json_dict())
    assert type(request_ses_info).from_json_dict(request_ses_info_json) == request_ses_info
    assert str(respond_ses_info_json) == str(respond_ses_info.to_json_dict())
    assert type(respond_ses_info).from_json_dict(respond_ses_info_json) == respond_ses_info
    assert str(pool_difficulty_json) == str(pool_difficulty.to_json_dict())
    assert type(pool_difficulty).from_json_dict(pool_difficulty_json) == pool_difficulty
    # NOTE: "handhsake" typo below comes from the generated fixture names; keep as-is.
    assert str(harvester_handhsake_json) == str(harvester_handhsake.to_json_dict())
    assert type(harvester_handhsake).from_json_dict(harvester_handhsake_json) == harvester_handhsake
    assert str(new_signage_point_harvester_json) == str(new_signage_point_harvester.to_json_dict())
    assert (
        type(new_signage_point_harvester).from_json_dict(new_signage_point_harvester_json)
        == new_signage_point_harvester
    )
    assert str(new_proof_of_space_json) == str(new_proof_of_space.to_json_dict())
    assert type(new_proof_of_space).from_json_dict(new_proof_of_space_json) == new_proof_of_space
    assert str(request_signatures_json) == str(request_signatures.to_json_dict())
    assert type(request_signatures).from_json_dict(request_signatures_json) == request_signatures
    assert str(respond_signatures_json) == str(respond_signatures.to_json_dict())
    assert type(respond_signatures).from_json_dict(respond_signatures_json) == respond_signatures
    assert str(plot_json) == str(plot.to_json_dict())
    assert type(plot).from_json_dict(plot_json) == plot
    assert str(request_plots_json) == str(request_plots.to_json_dict())
    assert type(request_plots).from_json_dict(request_plots_json) == request_plots
    assert str(respond_plots_json) == str(respond_plots.to_json_dict())
    assert type(respond_plots).from_json_dict(respond_plots_json) == respond_plots
    assert str(request_peers_introducer_json) == str(request_peers_introducer.to_json_dict())
    assert type(request_peers_introducer).from_json_dict(request_peers_introducer_json) == request_peers_introducer
    assert str(respond_peers_introducer_json) == str(respond_peers_introducer.to_json_dict())
    assert type(respond_peers_introducer).from_json_dict(respond_peers_introducer_json) == respond_peers_introducer
    assert str(authentication_payload_json) == str(authentication_payload.to_json_dict())
    assert type(authentication_payload).from_json_dict(authentication_payload_json) == authentication_payload
    assert str(get_pool_info_response_json) == str(get_pool_info_response.to_json_dict())
    assert type(get_pool_info_response).from_json_dict(get_pool_info_response_json) == get_pool_info_response
    assert str(post_partial_payload_json) == str(post_partial_payload.to_json_dict())
    assert type(post_partial_payload).from_json_dict(post_partial_payload_json) == post_partial_payload
    assert str(post_partial_request_json) == str(post_partial_request.to_json_dict())
    assert type(post_partial_request).from_json_dict(post_partial_request_json) == post_partial_request
    assert str(post_partial_response_json) == str(post_partial_response.to_json_dict())
    assert type(post_partial_response).from_json_dict(post_partial_response_json) == post_partial_response
    assert str(get_farmer_response_json) == str(get_farmer_response.to_json_dict())
    assert type(get_farmer_response).from_json_dict(get_farmer_response_json) == get_farmer_response
    assert str(post_farmer_payload_json) == str(post_farmer_payload.to_json_dict())
    assert type(post_farmer_payload).from_json_dict(post_farmer_payload_json) == post_farmer_payload
    assert str(post_farmer_request_json) == str(post_farmer_request.to_json_dict())
    assert type(post_farmer_request).from_json_dict(post_farmer_request_json) == post_farmer_request
    assert str(post_farmer_response_json) == str(post_farmer_response.to_json_dict())
    assert type(post_farmer_response).from_json_dict(post_farmer_response_json) == post_farmer_response
    assert str(put_farmer_payload_json) == str(put_farmer_payload.to_json_dict())
    assert type(put_farmer_payload).from_json_dict(put_farmer_payload_json) == put_farmer_payload
    assert str(put_farmer_request_json) == str(put_farmer_request.to_json_dict())
    assert type(put_farmer_request).from_json_dict(put_farmer_request_json) == put_farmer_request
    assert str(put_farmer_response_json) == str(put_farmer_response.to_json_dict())
    assert type(put_farmer_response).from_json_dict(put_farmer_response_json) == put_farmer_response
    assert str(error_response_json) == str(error_response.to_json_dict())
    assert type(error_response).from_json_dict(error_response_json) == error_response
    assert str(new_peak_timelord_json) == str(new_peak_timelord.to_json_dict())
    assert type(new_peak_timelord).from_json_dict(new_peak_timelord_json) == new_peak_timelord
    assert str(new_unfinished_block_timelord_json) == str(new_unfinished_block_timelord.to_json_dict())
    assert (
        type(new_unfinished_block_timelord).from_json_dict(new_unfinished_block_timelord_json)
        == new_unfinished_block_timelord
    )
    assert str(new_infusion_point_vdf_json) == str(new_infusion_point_vdf.to_json_dict())
    assert type(new_infusion_point_vdf).from_json_dict(new_infusion_point_vdf_json) == new_infusion_point_vdf
    assert str(new_signage_point_vdf_json) == str(new_signage_point_vdf.to_json_dict())
    assert type(new_signage_point_vdf).from_json_dict(new_signage_point_vdf_json) == new_signage_point_vdf
    assert str(new_end_of_sub_slot_bundle_json) == str(new_end_of_sub_slot_bundle.to_json_dict())
    assert (
        type(new_end_of_sub_slot_bundle).from_json_dict(new_end_of_sub_slot_bundle_json) == new_end_of_sub_slot_bundle
    )
    assert str(request_compact_proof_of_time_json) == str(request_compact_proof_of_time.to_json_dict())
    assert (
        type(request_compact_proof_of_time).from_json_dict(request_compact_proof_of_time_json)
        == request_compact_proof_of_time
    )
    assert str(respond_compact_proof_of_time_json) == str(respond_compact_proof_of_time.to_json_dict())
    assert (
        type(respond_compact_proof_of_time).from_json_dict(respond_compact_proof_of_time_json)
        == respond_compact_proof_of_time
    )
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/keyring.py | tests/util/keyring.py | import os
import shutil
import tempfile
from flax.util.file_keyring import FileKeyring, keyring_path_from_root
from flax.util.keychain import Keychain, default_keychain_service, default_keychain_user, get_private_key_user
from flax.util.keyring_wrapper import KeyringWrapper
from functools import wraps
from keyring.util import platform_
from keyrings.cryptfile.cryptfile import CryptFileKeyring # pyright: reportMissingImports=false
from pathlib import Path
from typing import Any, Optional
from unittest.mock import patch
def create_empty_cryptfilekeyring() -> CryptFileKeyring:
    """Create a legacy CryptFileKeyring whose backing file exists but holds no entries."""
    legacy_keyring = CryptFileKeyring()
    # Create (or truncate) the keyring file with owner-only permissions.
    keyring_fd = os.open(legacy_keyring.file_path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600)
    os.close(keyring_fd)
    assert Path(legacy_keyring.file_path).exists()
    return legacy_keyring
def add_dummy_key_to_cryptfilekeyring(crypt_file_keyring: CryptFileKeyring):
    """Store a placeholder private-key entry in the given legacy CryptFileKeyring."""
    # Unlock the legacy keyring with the test passphrase before writing.
    crypt_file_keyring.keyring_key = "your keyring password"
    first_key_user: str = get_private_key_user(default_keychain_user(), 0)
    crypt_file_keyring.set_password(default_keychain_service(), first_key_user, "abc123")
def setup_mock_file_keyring(mock_configure_backend, temp_file_keyring_dir, populate=False):
    """Back the patched KeyringWrapper._configure_backend with a FileKeyring in temp_file_keyring_dir.

    Args:
        mock_configure_backend: the mock standing in for KeyringWrapper._configure_backend;
            its return_value is set to the newly created FileKeyring.
        temp_file_keyring_dir: directory that will hold the keyring file.
        populate: when True, first write an empty-but-encrypted keyring payload to disk.
    """
    if populate:
        # Populate the file keyring with an empty (but encrypted) data set
        file_keyring_path = keyring_path_from_root(Path(temp_file_keyring_dir))
        os.makedirs(os.path.dirname(file_keyring_path), mode=0o700, exist_ok=True)
        # Reuse file_keyring_path (previously recomputed) and create the file with
        # owner-only permissions before writing the payload.
        with open(os.open(file_keyring_path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o600), "w") as f:
            f.write(
                # Encrypted using DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE. Data holds an empty keyring.
                "data: xtcxYOWtbeO9ruv4Nkwhw1pcTJCNh/fvPSdFxez/L0ysnag=\n"
                "nonce: 17ecac58deb7a392fccef49e\n"
                "salt: b1aa32d5730288d653e82017e4a4057c\n"
                "version: 1"
            )

    # Create the file keyring
    mock_configure_backend.return_value = FileKeyring.create(keys_root_path=Path(temp_file_keyring_dir))
def using_temp_file_keyring(populate=False):
    """
    Decorator factory: run the decorated test method inside a TempKeyring.

    The temporary directory and keyring are torn down automatically once the
    method returns. With `populate` set, the fresh keyring starts out holding
    an encrypted payload containing zero keys, protected by the default
    passphrase.
    """

    def decorator(method):
        @wraps(method)
        def wrapper(self, *args, **kwargs):
            # TempKeyring's context manager handles setup and cleanup.
            with TempKeyring(populate=populate):
                return method(self, *args, **kwargs)

        return wrapper

    return decorator
def using_temp_file_keyring_and_cryptfilekeyring(populate=False):
    """
    Decorator factory: like `using_temp_file_keyring`, but the temporary
    directory additionally receives an empty legacy Cryptfile keyring.
    """

    def decorator(method):
        @wraps(method)
        def wrapper(self, *args, **kwargs):
            # setup_cryptfilekeyring=True creates the legacy keyring alongside the temp one.
            with TempKeyring(populate=populate, setup_cryptfilekeyring=True):
                return method(self, *args, **kwargs)

        return wrapper

    return decorator
class TempKeyring:
    """Context manager providing an isolated, temporary Keychain for tests.

    Patches KeyringWrapper/keychain internals so all keyring access goes to a
    FileKeyring stored in a temporary directory, optionally alongside an empty
    legacy CryptFileKeyring. On cleanup the patches are stopped, the temp
    directory is (optionally) removed, and any previously shared
    KeyringWrapper is restored.
    """

    def __init__(
        self,
        *,
        user: str = "testing-1.8.0",
        service: str = "testing-flax-1.8.0",
        populate: bool = False,
        setup_cryptfilekeyring: bool = False,
        # Fixed annotation: default is None, so the type must be Optional[str]
        # (matches _patch_and_create_keychain's signature).
        existing_keyring_path: Optional[str] = None,
        delete_on_cleanup: bool = True,
        use_os_credential_store: bool = False,
    ):
        self.keychain = self._patch_and_create_keychain(
            user=user,
            service=service,
            populate=populate,
            existing_keyring_path=existing_keyring_path,
            use_os_credential_store=use_os_credential_store,
            setup_cryptfilekeyring=setup_cryptfilekeyring,
        )
        self.old_keys_root_path = None
        self.delete_on_cleanup = delete_on_cleanup
        self.cleaned_up = False

    def _patch_and_create_keychain(
        self,
        *,
        user: str,
        service: str,
        populate: bool,
        setup_cryptfilekeyring: bool,
        existing_keyring_path: Optional[str],
        use_os_credential_store: bool,
    ):
        """Start all patches, build the temp-dir-backed Keychain, and stash the patches on it."""
        existing_keyring_dir = Path(existing_keyring_path).parent if existing_keyring_path else None
        temp_dir = existing_keyring_dir or tempfile.mkdtemp(prefix="test_keyring_wrapper")

        mock_supports_os_passphrase_storage_patch = patch("flax.util.keychain.supports_os_passphrase_storage")
        mock_supports_os_passphrase_storage = mock_supports_os_passphrase_storage_patch.start()

        # Patch supports_os_passphrase_storage() to return use_os_credential_store
        mock_supports_os_passphrase_storage.return_value = use_os_credential_store

        mock_configure_backend_patch = patch.object(KeyringWrapper, "_configure_backend")
        mock_configure_backend = mock_configure_backend_patch.start()
        setup_mock_file_keyring(mock_configure_backend, temp_dir, populate=populate)

        mock_configure_legacy_backend_patch: Any = None
        if setup_cryptfilekeyring is False:
            # No legacy keyring wanted: make the legacy backend a no-op.
            mock_configure_legacy_backend_patch = patch.object(KeyringWrapper, "_configure_legacy_backend")
            mock_configure_legacy_backend = mock_configure_legacy_backend_patch.start()
            mock_configure_legacy_backend.return_value = None

        mock_data_root_patch = patch.object(platform_, "data_root")
        mock_data_root = mock_data_root_patch.start()

        # Mock CryptFileKeyring's file_path indirectly by changing keyring.util.platform_.data_root
        # We don't want CryptFileKeyring finding the real legacy keyring
        mock_data_root.return_value = temp_dir

        if setup_cryptfilekeyring is True:
            crypt_file_keyring = create_empty_cryptfilekeyring()
            add_dummy_key_to_cryptfilekeyring(crypt_file_keyring)

        keychain = Keychain(user=user, service=service)
        keychain.keyring_wrapper = KeyringWrapper(keys_root_path=Path(temp_dir))

        # Stash the temp_dir in the keychain instance
        keychain._temp_dir = temp_dir  # type: ignore

        # Stash the patches in the keychain instance so cleanup() can stop them later
        keychain._mock_supports_os_passphrase_storage_patch = mock_supports_os_passphrase_storage_patch  # type: ignore
        keychain._mock_configure_backend_patch = mock_configure_backend_patch  # type: ignore
        keychain._mock_configure_legacy_backend_patch = mock_configure_legacy_backend_patch  # type: ignore
        keychain._mock_data_root_patch = mock_data_root_patch  # type: ignore

        return keychain

    def __enter__(self):
        """Displace any shared KeyringWrapper and route it at our temporary keys root."""
        assert not self.cleaned_up
        if KeyringWrapper.get_shared_instance(create_if_necessary=False) is not None:
            # Remember the previous shared root so cleanup() can restore it.
            self.old_keys_root_path = KeyringWrapper.get_shared_instance().keys_root_path
            KeyringWrapper.cleanup_shared_instance()
        kc = self.get_keychain()
        KeyringWrapper.set_keys_root_path(kc.keyring_wrapper.keys_root_path)
        return kc

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.cleanup()

    def get_keychain(self):
        """Return the temporary Keychain created for this TempKeyring."""
        return self.keychain

    def cleanup(self):
        """Stop all patches, optionally delete the temp dir, and restore the prior shared wrapper."""
        assert not self.cleaned_up

        keys_root_path = self.keychain.keyring_wrapper.keys_root_path

        if self.delete_on_cleanup:
            self.keychain.keyring_wrapper.keyring.cleanup_keyring_file_watcher()
            shutil.rmtree(self.keychain._temp_dir)

        self.keychain._mock_supports_os_passphrase_storage_patch.stop()
        self.keychain._mock_configure_backend_patch.stop()
        if self.keychain._mock_configure_legacy_backend_patch is not None:
            self.keychain._mock_configure_legacy_backend_patch.stop()
        self.keychain._mock_data_root_patch.stop()

        if self.old_keys_root_path is not None:
            # Restore the previously shared KeyringWrapper if __enter__ displaced it.
            if KeyringWrapper.get_shared_instance(create_if_necessary=False) is not None:
                shared_keys_root_path = KeyringWrapper.get_shared_instance().keys_root_path
                if shared_keys_root_path == keys_root_path:
                    KeyringWrapper.cleanup_shared_instance()
                    KeyringWrapper.set_keys_root_path(self.old_keys_root_path)
                    KeyringWrapper.get_shared_instance()
        self.cleaned_up = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_network_protocol_files.py | tests/util/test_network_protocol_files.py | # this file is generated by build_network_protocol_files.py
from typing import Tuple
from pathlib import Path
from tests.util.network_protocol_data import * # noqa: F403
from tests.util.protocol_messages_json import * # noqa: F403
from tests.util.build_network_protocol_files import get_network_protocol_filename
def parse_blob(input_bytes: bytes) -> Tuple[bytes, bytes]:
    """Split one length-prefixed message off the front of input_bytes.

    Each message is encoded as a 4-byte big-endian length followed by that
    many payload bytes. Returns (payload, remaining_bytes).
    """
    payload_size = int.from_bytes(input_bytes[:4], "big")
    payload_end = 4 + payload_size
    return input_bytes[4:payload_end], input_bytes[payload_end:]
def test_protocol_bytes() -> None:
    """Regression-check that every recorded protocol message round-trips.

    The reference blob produced by build_network_protocol_files.py is parsed
    message by message; each payload must deserialize to the expected object
    and re-serialize to identical bytes. The 93 identical copy-pasted stanzas
    of the generated original are collapsed into one data-driven loop; the
    message order below matches the order they were written into the file.
    """
    expected_messages = [
        new_signage_point,
        declare_proof_of_space,
        request_signed_values,
        farming_info,
        signed_values,
        new_peak,
        new_transaction,
        request_transaction,
        respond_transaction,
        request_proof_of_weight,
        respond_proof_of_weight,
        request_block,
        reject_block,
        request_blocks,
        respond_blocks,
        reject_blocks,
        respond_block,
        new_unfinished_block,
        request_unfinished_block,
        respond_unfinished_block,
        new_signage_point_or_end_of_subslot,
        request_signage_point_or_end_of_subslot,
        respond_signage_point,
        respond_end_of_subslot,
        request_mempool_transaction,
        new_compact_vdf,
        request_compact_vdf,
        respond_compact_vdf,
        request_peers,
        respond_peers,
        request_puzzle_solution,
        puzzle_solution_response,
        respond_puzzle_solution,
        reject_puzzle_solution,
        send_transaction,
        transaction_ack,
        new_peak_wallet,
        request_block_header,
        request_block_headers,
        respond_header_block,
        respond_block_headers,
        reject_header_request,
        request_removals,
        respond_removals,
        reject_removals_request,
        request_additions,
        respond_additions,
        reject_additions,
        request_header_blocks,
        reject_header_blocks,
        respond_header_blocks,
        coin_state,
        register_for_ph_updates,
        reject_block_headers,
        respond_to_ph_updates,
        register_for_coin_updates,
        respond_to_coin_updates,
        coin_state_update,
        request_children,
        respond_children,
        request_ses_info,
        respond_ses_info,
        pool_difficulty,
        harvester_handhsake,
        new_signage_point_harvester,
        new_proof_of_space,
        request_signatures,
        respond_signatures,
        plot,
        request_plots,
        respond_plots,
        request_peers_introducer,
        respond_peers_introducer,
        authentication_payload,
        get_pool_info_response,
        post_partial_payload,
        post_partial_request,
        post_partial_response,
        get_farmer_response,
        post_farmer_payload,
        post_farmer_request,
        post_farmer_response,
        put_farmer_payload,
        put_farmer_request,
        put_farmer_response,
        error_response,
        new_peak_timelord,
        new_unfinished_block_timelord,
        new_infusion_point_vdf,
        new_signage_point_vdf,
        new_end_of_sub_slot_bundle,
        request_compact_proof_of_time,
        respond_compact_proof_of_time,
    ]
    filename: Path = get_network_protocol_filename()
    assert filename.exists()
    with open(filename, "rb") as f:
        input_bytes = f.read()
    for expected in expected_messages:
        message_bytes, input_bytes = parse_blob(input_bytes)
        message = type(expected).from_bytes(message_bytes)
        assert message == expected
        assert bytes(message) == bytes(expected)
    # The whole reference blob must be consumed exactly.
    assert input_bytes == b""
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/util/test_network_protocol_test.py | tests/util/test_network_protocol_test.py | # flake8: noqa
from __future__ import annotations
from typing import Any, List, Set
from flax.protocols import (
farmer_protocol,
full_node_protocol,
harvester_protocol,
introducer_protocol,
pool_protocol,
shared_protocol,
timelord_protocol,
wallet_protocol,
)
# this test is to ensure the network protocol message regression test always
# stays up to date. It's a test for the test
def types_in_module(mod: Any) -> Set[str]:
    """Return the names of objects defined in *mod*, plus anything in its ``__all__``."""
    mod_name = mod.__name__
    # Objects whose __module__ matches are considered "defined here"; names
    # without a __module__ attribute (e.g. imported submodules) are skipped.
    found = {
        sym for sym in dir(mod) if getattr(getattr(mod, sym), "__module__", None) == mod_name
    }
    found.update(getattr(mod, "__all__", ()))
    return found
def test_missing_messages_state_machine() -> None:
    """Regression-check the size of the protocol state machine tables."""
    from flax.protocols.protocol_state_machine import NO_REPLY_EXPECTED, VALID_REPLY_MESSAGE_MAP

    # if these asserts fail, make sure to add the new network protocol messages
    # to the visitor in build_network_protocol_files.py and rerun it. Then
    # update this test
    expected_sizes = ((VALID_REPLY_MESSAGE_MAP, 20), (NO_REPLY_EXPECTED, 7))
    for table, expected_len in expected_sizes:
        assert (
            len(table) == expected_len
        ), "A message was added to the protocol state machine. Make sure to update the protocol message regression test to include the new message"
def test_missing_messages() -> None:
    """Regression-check the exact set of public message types in each protocol module."""
    # if these asserts fail, make sure to add the new network protocol messages
    # to the visitor in build_network_protocol_files.py and rerun it. Then
    # update this test
    expected_exports = [
        (
            "wallet_protocol",
            wallet_protocol,
            {
                "CoinState",
                "CoinStateUpdate",
                "NewPeakWallet",
                "PuzzleSolutionResponse",
                "RegisterForCoinUpdates",
                "RegisterForPhUpdates",
                "RejectAdditionsRequest",
                "RejectBlockHeaders",
                "RejectHeaderBlocks",
                "RejectHeaderRequest",
                "RejectPuzzleSolution",
                "RejectRemovalsRequest",
                "RequestAdditions",
                "RequestBlockHeader",
                "RequestBlockHeaders",
                "RequestChildren",
                "RequestFeeEstimates",
                "RequestHeaderBlocks",
                "RequestPuzzleSolution",
                "RequestRemovals",
                "RequestSESInfo",
                "RespondAdditions",
                "RespondBlockHeader",
                "RespondBlockHeaders",
                "RespondChildren",
                "RespondFeeEstimates",
                "RespondHeaderBlocks",
                "RespondPuzzleSolution",
                "RespondRemovals",
                "RespondSESInfo",
                "RespondToCoinUpdates",
                "RespondToPhUpdates",
                "SendTransaction",
                "TransactionAck",
            },
        ),
        (
            "farmer_protocol",
            farmer_protocol,
            {
                "DeclareProofOfSpace",
                "FarmingInfo",
                "NewSignagePoint",
                "RequestSignedValues",
                "SignedValues",
            },
        ),
        (
            "full_node_protocol",
            full_node_protocol,
            {
                "NewCompactVDF",
                "NewPeak",
                "NewSignagePointOrEndOfSubSlot",
                "NewTransaction",
                "NewUnfinishedBlock",
                "RejectBlock",
                "RejectBlocks",
                "RequestBlock",
                "RequestBlocks",
                "RequestCompactVDF",
                "RequestMempoolTransactions",
                "RequestPeers",
                "RequestProofOfWeight",
                "RequestSignagePointOrEndOfSubSlot",
                "RequestTransaction",
                "RequestUnfinishedBlock",
                "RespondBlock",
                "RespondBlocks",
                "RespondCompactVDF",
                "RespondEndOfSubSlot",
                "RespondPeers",
                "RespondProofOfWeight",
                "RespondSignagePoint",
                "RespondTransaction",
                "RespondUnfinishedBlock",
            },
        ),
        (
            "harvester_protocol",
            harvester_protocol,
            {
                "HarvesterHandshake",
                "NewProofOfSpace",
                "NewSignagePointHarvester",
                "Plot",
                "PlotSyncDone",
                "PlotSyncError",
                "PlotSyncIdentifier",
                "PlotSyncPathList",
                "PlotSyncPlotList",
                "PlotSyncResponse",
                "PlotSyncStart",
                "PoolDifficulty",
                "RequestPlots",
                "RequestSignatures",
                "RespondPlots",
                "RespondSignatures",
            },
        ),
        (
            "introducer_protocol",
            introducer_protocol,
            {"RequestPeersIntroducer", "RespondPeersIntroducer"},
        ),
        (
            "pool_protocol",
            pool_protocol,
            {
                "AuthenticationPayload",
                "ErrorResponse",
                "GetFarmerResponse",
                "GetPoolInfoResponse",
                "PoolErrorCode",
                "PostFarmerPayload",
                "PostFarmerRequest",
                "PostFarmerResponse",
                "PostPartialPayload",
                "PostPartialRequest",
                "PostPartialResponse",
                "PutFarmerPayload",
                "PutFarmerRequest",
                "PutFarmerResponse",
                "get_current_authentication_token",
                "validate_authentication_token",
            },
        ),
        (
            "timelord_protocol",
            timelord_protocol,
            {
                "NewEndOfSubSlotVDF",
                "NewInfusionPointVDF",
                "NewPeakTimelord",
                "NewSignagePointVDF",
                "NewUnfinishedBlockTimelord",
                "RequestCompactProofOfTime",
                "RespondCompactProofOfTime",
            },
        ),
        (
            "shared_protocol",
            shared_protocol,
            {"Handshake", "Capability"},
        ),
    ]
    for module_name, module, expected in expected_exports:
        assert (
            types_in_module(module) == expected
        ), f"message types were added or removed from {module_name}. Make sure to update the protocol message regression test to include the new message"
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/test_receiver.py | tests/plot_sync/test_receiver.py | from __future__ import annotations
import dataclasses
import logging
import random
import time
from secrets import token_bytes
from typing import Any, Callable, List, Tuple, Type, Union
import pytest
from blspy import G1Element
from flax.plot_sync.delta import Delta
from flax.plot_sync.receiver import Receiver, Sync
from flax.plot_sync.util import ErrorCodes, State
from flax.protocols.harvester_protocol import (
Plot,
PlotSyncDone,
PlotSyncIdentifier,
PlotSyncPathList,
PlotSyncPlotList,
PlotSyncResponse,
PlotSyncStart,
)
from flax.server.ws_connection import NodeType
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint8, uint32, uint64
from flax.util.misc import get_list_or_len
from flax.util.streamable import _T_Streamable
from tests.plot_sync.util import get_dummy_connection
log = logging.getLogger(__name__)
next_message_id = uint64(0)
def assert_default_values(receiver: Receiver) -> None:
    """Assert that *receiver* is in its freshly-constructed, empty state."""
    assert receiver.current_sync() == Sync()
    assert receiver.last_sync() == Sync()
    assert receiver.plots() == {}
    # All error/duplicate bookkeeping lists start out empty.
    for accessor in (receiver.invalid, receiver.keys_missing, receiver.duplicates):
        assert accessor() == []
async def dummy_callback(_: bytes32, __: Delta) -> None:
    # No-op sync-update callback handed to Receiver in these tests.
    pass
class SyncStepData:
    """One step of a plot sync: the expected state, its handler, and the payload to send."""

    state: State
    function: Any
    payload_type: Any
    args: Any

    def __init__(
        self, state: State, function: Callable[[_T_Streamable], Any], payload_type: Type[_T_Streamable], *args: Any
    ) -> None:
        # Plain attribute capture; *args holds the payload constructor arguments.
        self.state, self.function, self.payload_type, self.args = state, function, payload_type, args
def plot_sync_identifier(current_sync_id: uint64, message_id: uint64) -> PlotSyncIdentifier:
    """Build a PlotSyncIdentifier with a zeroed timestamp for deterministic tests."""
    timestamp = uint64(0)
    return PlotSyncIdentifier(timestamp, current_sync_id, message_id)
def create_payload(payload_type: Any, start: bool, *args: Any) -> Any:
    """Construct a *payload_type* message, auto-numbering it via a module-level counter."""
    global next_message_id
    if start:
        # A new sync run restarts message numbering at zero.
        next_message_id = uint64(0)
    identifier = plot_sync_identifier(uint64(1), next_message_id)
    next_message_id = uint64(next_message_id + 1)
    return payload_type(identifier, *args)
def assert_error_response(plot_sync: Receiver, error_code: ErrorCodes) -> None:
    """Assert the last message sent on the receiver's connection is an error with *error_code*."""
    conn = plot_sync.connection()
    assert conn is not None
    last_message = conn.last_sent_message
    assert last_message is not None
    parsed: PlotSyncResponse = PlotSyncResponse.from_bytes(last_message.data)
    assert parsed.error is not None
    assert parsed.error.code == error_code.value
def pre_function_validate(receiver: Receiver, data: Union[List[Plot], List[str]], expected_state: State) -> None:
    """Validate receiver bookkeeping immediately *before* a sync-step handler runs."""
    if expected_state == State.loaded:
        # New plots must not be known to the receiver yet.
        for entry in data:
            assert type(entry) == Plot
            assert entry.filename not in receiver.plots()
    elif expected_state == State.removed:
        # Plots queued for removal must currently exist.
        assert all(path in receiver.plots() for path in data)
    elif expected_state == State.invalid:
        assert all(path not in receiver.invalid() for path in data)
    elif expected_state == State.keys_missing:
        assert all(path not in receiver.keys_missing() for path in data)
    elif expected_state == State.duplicates:
        assert all(path not in receiver.duplicates() for path in data)
def post_function_validate(receiver: Receiver, data: Union[List[Plot], List[str]], expected_state: State) -> None:
    """Validate the receiver's sync delta immediately *after* a sync-step handler ran."""
    delta = receiver._current_sync.delta
    if expected_state == State.loaded:
        # Newly loaded plots are recorded as valid additions.
        for entry in data:
            assert type(entry) == Plot
            assert entry.filename in delta.valid.additions
    elif expected_state == State.removed:
        assert all(path in delta.valid.removals for path in data)
    elif expected_state == State.invalid:
        assert all(path in delta.invalid.additions for path in data)
    elif expected_state == State.keys_missing:
        assert all(path in delta.keys_missing.additions for path in data)
    elif expected_state == State.duplicates:
        assert all(path in delta.duplicates.additions for path in data)
@pytest.mark.asyncio
async def run_sync_step(receiver: Receiver, sync_step: SyncStepData) -> None:
    """Drive *receiver* through one sync step and validate its state transitions."""
    assert receiver.current_sync().state == sync_step.state
    last_sync_time_before = receiver._last_sync.time_done
    # For the list types invoke the trigger function in batches
    if sync_step.payload_type == PlotSyncPlotList or sync_step.payload_type == PlotSyncPathList:
        step_data, _ = sync_step.args
        assert len(step_data) == 10
        # Invoke batches of: 1, 2, 3, 4 items and validate the data against plot store before and after
        indexes = [0, 1, 3, 6, 10]
        for i in range(0, len(indexes) - 1):
            plots_processed_before = receiver.current_sync().plots_processed
            invoke_data = step_data[indexes[i] : indexes[i + 1]]
            pre_function_validate(receiver, invoke_data, sync_step.state)
            # Final batch signals completion of this step (last flag True).
            await sync_step.function(
                create_payload(sync_step.payload_type, False, invoke_data, i == (len(indexes) - 2))
            )
            post_function_validate(receiver, invoke_data, sync_step.state)
            if sync_step.state == State.removed:
                # Removals do not count toward the processed-plots tally.
                assert receiver.current_sync().plots_processed == plots_processed_before
            else:
                assert receiver.current_sync().plots_processed == plots_processed_before + len(invoke_data)
    else:
        # For Start/Done just invoke it..
        await sync_step.function(create_payload(sync_step.payload_type, sync_step.state == State.idle, *sync_step.args))
    # Make sure we moved to the next state
    assert receiver.current_sync().state != sync_step.state
    if sync_step.payload_type == PlotSyncDone:
        # Completing a sync records a fresh finish time and a fully processed plot count.
        assert receiver._last_sync.time_done != last_sync_time_before
        assert receiver.last_sync().plots_processed == receiver.last_sync().plots_total
    else:
        assert receiver._last_sync.time_done == last_sync_time_before
def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]:
harvester_connection = get_dummy_connection(NodeType.HARVESTER)
receiver = Receiver(harvester_connection, dummy_callback) # type:ignore[arg-type]
# Create example plot data
path_list = [str(x) for x in range(0, 40)]
plot_info_list = [
Plot(
filename=str(x),
size=uint8(0),
plot_id=bytes32(token_bytes(32)),
pool_contract_puzzle_hash=None,
pool_public_key=None,
plot_public_key=G1Element(),
file_size=uint64(random.randint(0, 100)),
time_modified=uint64(0),
)
for x in path_list
]
# Manually add the plots we want to remove in tests
receiver._plots = {plot_info.filename: plot_info for plot_info in plot_info_list[0:10]}
receiver._total_plot_size = sum(plot.file_size for plot in receiver._plots.values())
sync_steps: List[SyncStepData] = [
SyncStepData(State.idle, receiver.sync_started, PlotSyncStart, False, uint64(0), uint32(len(plot_info_list))),
SyncStepData(State.loaded, receiver.process_loaded, PlotSyncPlotList, plot_info_list[10:20], True),
SyncStepData(State.removed, receiver.process_removed, PlotSyncPathList, path_list[0:10], True),
SyncStepData(State.invalid, receiver.process_invalid, PlotSyncPathList, path_list[20:30], True),
SyncStepData(State.keys_missing, receiver.process_keys_missing, PlotSyncPathList, path_list[30:40], True),
SyncStepData(State.duplicates, receiver.process_duplicates, PlotSyncPathList, path_list[10:20], True),
SyncStepData(State.done, receiver.sync_done, PlotSyncDone, uint64(0)),
]
return receiver, sync_steps
def test_default_values() -> None:
assert_default_values(Receiver(get_dummy_connection(NodeType.HARVESTER), dummy_callback)) # type:ignore[arg-type]
@pytest.mark.asyncio
async def test_reset() -> None:
receiver, sync_steps = plot_sync_setup()
connection_before = receiver.connection()
# Assign some dummy values
receiver._current_sync.state = State.done
receiver._current_sync.sync_id = uint64(1)
receiver._current_sync.next_message_id = uint64(1)
receiver._current_sync.plots_processed = uint32(1)
receiver._current_sync.plots_total = uint32(1)
receiver._current_sync.delta.valid.additions = receiver.plots().copy()
receiver._current_sync.delta.valid.removals = ["1"]
receiver._current_sync.delta.invalid.additions = ["1"]
receiver._current_sync.delta.invalid.removals = ["1"]
receiver._current_sync.delta.keys_missing.additions = ["1"]
receiver._current_sync.delta.keys_missing.removals = ["1"]
receiver._current_sync.delta.duplicates.additions = ["1"]
receiver._current_sync.delta.duplicates.removals = ["1"]
receiver._current_sync.time_done = time.time()
receiver._last_sync = dataclasses.replace(receiver._current_sync)
receiver._invalid = ["1"]
receiver._keys_missing = ["1"]
receiver._duplicates = ["1"]
receiver._last_sync.sync_id = uint64(1)
# Call `reset` and make sure all expected values are set back to their defaults.
receiver.reset()
assert_default_values(receiver)
assert receiver._current_sync.delta == Delta()
# Connection should remain
assert receiver.connection() == connection_before
@pytest.mark.parametrize("counts_only", [True, False])
@pytest.mark.asyncio
async def test_to_dict(counts_only: bool) -> None:
receiver, sync_steps = plot_sync_setup()
plot_sync_dict_1 = receiver.to_dict(counts_only)
assert get_list_or_len(plot_sync_dict_1["plots"], not counts_only) == 10
assert get_list_or_len(plot_sync_dict_1["failed_to_open_filenames"], not counts_only) == 0
assert get_list_or_len(plot_sync_dict_1["no_key_filenames"], not counts_only) == 0
assert get_list_or_len(plot_sync_dict_1["duplicates"], not counts_only) == 0
assert plot_sync_dict_1["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values())
assert plot_sync_dict_1["syncing"] is None
assert plot_sync_dict_1["last_sync_time"] is None
assert plot_sync_dict_1["connection"] == {
"node_id": receiver.connection().peer_node_id,
"host": receiver.connection().peer_host,
"port": receiver.connection().peer_port,
}
# We should get equal dicts
assert plot_sync_dict_1 == receiver.to_dict(counts_only)
# But unequal dicts wit the opposite counts_only value
assert plot_sync_dict_1 != receiver.to_dict(not counts_only)
expected_plot_files_processed: int = 0
expected_plot_files_total: int = sync_steps[State.idle].args[2]
# Walk through all states from idle to done and run them with the test data and validate the sync progress
for state in State:
await run_sync_step(receiver, sync_steps[state])
if state != State.idle and state != State.removed and state != State.done:
expected_plot_files_processed += len(sync_steps[state].args[0])
sync_data = receiver.to_dict()["syncing"]
if state == State.done:
expected_sync_data = None
else:
expected_sync_data = {
"initial": True,
"plot_files_processed": expected_plot_files_processed,
"plot_files_total": expected_plot_files_total,
}
assert sync_data == expected_sync_data
plot_sync_dict_3 = receiver.to_dict(counts_only)
assert get_list_or_len(sync_steps[State.loaded].args[0], counts_only) == plot_sync_dict_3["plots"]
assert (
get_list_or_len(sync_steps[State.invalid].args[0], counts_only) == plot_sync_dict_3["failed_to_open_filenames"]
)
assert get_list_or_len(sync_steps[State.keys_missing].args[0], counts_only) == plot_sync_dict_3["no_key_filenames"]
assert get_list_or_len(sync_steps[State.duplicates].args[0], counts_only) == plot_sync_dict_3["duplicates"]
assert plot_sync_dict_3["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values())
assert plot_sync_dict_3["last_sync_time"] > 0
assert plot_sync_dict_3["syncing"] is None
# Trigger a repeated plot sync
await receiver.sync_started(
PlotSyncStart(
PlotSyncIdentifier(uint64(time.time()), uint64(receiver.last_sync().sync_id + 1), uint64(0)),
False,
receiver.last_sync().sync_id,
uint32(1),
)
)
assert receiver.to_dict()["syncing"] == {
"initial": False,
"plot_files_processed": 0,
"plot_files_total": 1,
}
@pytest.mark.asyncio
async def test_sync_flow() -> None:
receiver, sync_steps = plot_sync_setup()
for plot_info in sync_steps[State.loaded].args[0]:
assert plot_info.filename not in receiver.plots()
for path in sync_steps[State.removed].args[0]:
assert path in receiver.plots()
for path in sync_steps[State.invalid].args[0]:
assert path not in receiver.invalid()
for path in sync_steps[State.keys_missing].args[0]:
assert path not in receiver.keys_missing()
for path in sync_steps[State.duplicates].args[0]:
assert path not in receiver.duplicates()
# Walk through all states from idle to done and run them with the test data
for state in State:
await run_sync_step(receiver, sync_steps[state])
for plot_info in sync_steps[State.loaded].args[0]:
assert plot_info.filename in receiver.plots()
for path in sync_steps[State.removed].args[0]:
assert path not in receiver.plots()
for path in sync_steps[State.invalid].args[0]:
assert path in receiver.invalid()
for path in sync_steps[State.keys_missing].args[0]:
assert path in receiver.keys_missing()
for path in sync_steps[State.duplicates].args[0]:
assert path in receiver.duplicates()
# We should be in idle state again
assert receiver.current_sync().state == State.idle
@pytest.mark.asyncio
async def test_invalid_ids() -> None:
receiver, sync_steps = plot_sync_setup()
for state in State:
assert receiver.current_sync().state == state
current_step = sync_steps[state]
if receiver.current_sync().state == State.idle:
# Set last_sync_id for the tests below
receiver._last_sync.sync_id = uint64(1)
# Test "sync_started last doesn't match"
invalid_last_sync_id_param = PlotSyncStart(
plot_sync_identifier(uint64(0), uint64(0)), False, uint64(2), uint32(0)
)
await current_step.function(invalid_last_sync_id_param)
assert_error_response(receiver, ErrorCodes.invalid_last_sync_id)
# Test "last_sync_id == new_sync_id"
invalid_sync_id_match_param = PlotSyncStart(
plot_sync_identifier(uint64(1), uint64(0)), False, uint64(1), uint32(0)
)
await current_step.function(invalid_sync_id_match_param)
assert_error_response(receiver, ErrorCodes.sync_ids_match)
# Reset the last_sync_id to the default
receiver._last_sync.sync_id = uint64(0)
else:
# Test invalid sync_id
invalid_sync_id_param = current_step.payload_type(
plot_sync_identifier(uint64(10), uint64(receiver.current_sync().next_message_id)), *current_step.args
)
await current_step.function(invalid_sync_id_param)
assert_error_response(receiver, ErrorCodes.invalid_identifier)
# Test invalid message_id
invalid_message_id_param = current_step.payload_type(
plot_sync_identifier(
receiver.current_sync().sync_id, uint64(receiver.current_sync().next_message_id + 1)
),
*current_step.args,
)
await current_step.function(invalid_message_id_param)
assert_error_response(receiver, ErrorCodes.invalid_identifier)
payload = create_payload(current_step.payload_type, state == State.idle, *current_step.args)
await current_step.function(payload)
@pytest.mark.parametrize(
["state_to_fail", "expected_error_code"],
[
pytest.param(State.loaded, ErrorCodes.plot_already_available, id="already available plots"),
pytest.param(State.invalid, ErrorCodes.plot_already_available, id="already available paths"),
pytest.param(State.removed, ErrorCodes.plot_not_available, id="not available"),
],
)
@pytest.mark.asyncio
async def test_plot_errors(state_to_fail: State, expected_error_code: ErrorCodes) -> None:
receiver, sync_steps = plot_sync_setup()
for state in State:
assert receiver.current_sync().state == state
current_step = sync_steps[state]
if state == state_to_fail:
plot_infos, _ = current_step.args
await current_step.function(create_payload(current_step.payload_type, False, plot_infos, False))
identifier = plot_sync_identifier(receiver.current_sync().sync_id, receiver.current_sync().next_message_id)
invalid_payload = current_step.payload_type(identifier, plot_infos, True)
await current_step.function(invalid_payload)
if state == state_to_fail:
assert_error_response(receiver, expected_error_code)
return
else:
await current_step.function(
create_payload(current_step.payload_type, state == State.idle, *current_step.args)
)
assert False, "Didn't fail in the expected state"
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/test_plot_sync.py | tests/plot_sync/test_plot_sync.py | from __future__ import annotations
import asyncio
import functools
from dataclasses import dataclass, field, replace
from pathlib import Path
from shutil import copy
from typing import Any, Callable, List, Optional, Tuple
import pytest
import pytest_asyncio
from blspy import G1Element
from flax.farmer.farmer_api import Farmer
from flax.harvester.harvester_api import Harvester
from flax.plot_sync.delta import Delta, PathListDelta, PlotListDelta
from flax.plot_sync.receiver import Receiver
from flax.plot_sync.sender import Sender
from flax.plot_sync.util import Constants, State
from flax.plotting.manager import PlotManager
from flax.plotting.util import add_plot_directory, remove_plot_directory
from flax.protocols.harvester_protocol import Plot
from flax.server.start_service import Service
from flax.server.ws_connection import ProtocolMessageTypes
from flax.simulator.block_tools import BlockTools
from flax.simulator.time_out_assert import time_out_assert
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.config import create_default_flax_config, lock_and_load_config, save_config
from flax.util.ints import uint8, uint32, uint64
from flax.util.streamable import _T_Streamable
from tests.plot_sync.util import start_harvester_service
from tests.plotting.test_plot_manager import Directory, MockPlotInfo
from tests.plotting.util import get_test_plots
def synced(sender: Sender, receiver: Receiver, previous_last_sync_id: int) -> bool:
return (
sender._last_sync_id != previous_last_sync_id
and sender._last_sync_id == receiver._last_sync.sync_id != 0
and receiver.current_sync().state == State.idle
and not sender.sync_active()
)
def assert_path_list_matches(expected_list: List[str], actual_list: List[str]) -> None:
assert len(expected_list) == len(actual_list)
for item in expected_list:
assert str(item) in actual_list
@dataclass
class ExpectedResult:
valid_count: int = 0
valid_delta: PlotListDelta = field(default_factory=PlotListDelta)
invalid_count: int = 0
invalid_delta: PathListDelta = field(default_factory=PathListDelta)
keys_missing_count: int = 0
keys_missing_delta: PathListDelta = field(default_factory=PathListDelta)
duplicates_count: int = 0
duplicates_delta: PathListDelta = field(default_factory=PathListDelta)
callback_passed: bool = False
def add_valid(self, list_plots: List[MockPlotInfo]) -> None:
def create_mock_plot(info: MockPlotInfo) -> Plot:
return Plot(
info.prover.get_filename(),
uint8(0),
bytes32(b"\x00" * 32),
None,
None,
G1Element(),
uint64(0),
uint64(0),
)
self.valid_count += len(list_plots)
self.valid_delta.additions.update({x.prover.get_filename(): create_mock_plot(x) for x in list_plots})
def remove_valid(self, list_paths: List[Path]) -> None:
self.valid_count -= len(list_paths)
self.valid_delta.removals += [str(x) for x in list_paths]
def add_invalid(self, list_paths: List[Path]) -> None:
self.invalid_count += len(list_paths)
self.invalid_delta.additions += [str(x) for x in list_paths]
def remove_invalid(self, list_paths: List[Path]) -> None:
self.invalid_count -= len(list_paths)
self.invalid_delta.removals += [str(x) for x in list_paths]
def add_keys_missing(self, list_paths: List[Path]) -> None:
self.keys_missing_count += len(list_paths)
self.keys_missing_delta.additions += [str(x) for x in list_paths]
def remove_keys_missing(self, list_paths: List[Path]) -> None:
self.keys_missing_count -= len(list_paths)
self.keys_missing_delta.removals += [str(x) for x in list_paths]
def add_duplicates(self, list_paths: List[Path]) -> None:
self.duplicates_count += len(list_paths)
self.duplicates_delta.additions += [str(x) for x in list_paths]
def remove_duplicates(self, list_paths: List[Path]) -> None:
self.duplicates_count -= len(list_paths)
self.duplicates_delta.removals += [str(x) for x in list_paths]
@dataclass
class Environment:
root_path: Path
harvester_services: List[Service[Harvester]]
farmer_service: Service[Farmer]
harvesters: List[Harvester]
farmer: Farmer
dir_1: Directory
dir_2: Directory
dir_3: Directory
dir_4: Directory
dir_invalid: Directory
dir_keys_missing: Directory
dir_duplicates: Directory
expected: List[ExpectedResult]
def get_harvester(self, peer_id: bytes32) -> Optional[Harvester]:
for harvester in self.harvesters:
assert harvester.server is not None
if harvester.server.node_id == peer_id:
return harvester
return None
def add_directory(self, harvester_index: int, directory: Directory, state: State = State.loaded) -> None:
try:
add_plot_directory(self.harvesters[harvester_index].root_path, str(directory.path))
except ValueError:
pass
if state == State.loaded:
self.expected[harvester_index].add_valid(directory.plot_info_list())
elif state == State.invalid:
self.expected[harvester_index].add_invalid(directory.path_list())
elif state == State.keys_missing:
self.expected[harvester_index].add_keys_missing(directory.path_list())
elif state == State.duplicates:
self.expected[harvester_index].add_duplicates(directory.path_list())
else:
assert False, "Invalid state"
def remove_directory(self, harvester_index: int, directory: Directory, state: State = State.removed) -> None:
remove_plot_directory(self.harvesters[harvester_index].root_path, str(directory.path))
if state == State.removed:
self.expected[harvester_index].remove_valid(directory.path_list())
elif state == State.invalid:
self.expected[harvester_index].remove_invalid(directory.path_list())
elif state == State.keys_missing:
self.expected[harvester_index].remove_keys_missing(directory.path_list())
elif state == State.duplicates:
self.expected[harvester_index].remove_duplicates(directory.path_list())
else:
assert False, "Invalid state"
def add_all_directories(self, harvester_index: int) -> None:
self.add_directory(harvester_index, self.dir_1)
self.add_directory(harvester_index, self.dir_2)
self.add_directory(harvester_index, self.dir_3)
self.add_directory(harvester_index, self.dir_4)
self.add_directory(harvester_index, self.dir_keys_missing, State.keys_missing)
self.add_directory(harvester_index, self.dir_invalid, State.invalid)
# Note: This does not add dir_duplicates since its important that the duplicated plots are loaded after the
# the original ones.
# self.add_directory(harvester_index, self.dir_duplicates, State.duplicates)
def remove_all_directories(self, harvester_index: int) -> None:
self.remove_directory(harvester_index, self.dir_1)
self.remove_directory(harvester_index, self.dir_2)
self.remove_directory(harvester_index, self.dir_3)
self.remove_directory(harvester_index, self.dir_4)
self.remove_directory(harvester_index, self.dir_keys_missing, State.keys_missing)
self.remove_directory(harvester_index, self.dir_invalid, State.invalid)
self.remove_directory(harvester_index, self.dir_duplicates, State.duplicates)
async def plot_sync_callback(self, peer_id: bytes32, delta: Optional[Delta]) -> None:
if delta is None:
return
harvester: Optional[Harvester] = self.get_harvester(peer_id)
assert harvester is not None
expected = self.expected[self.harvesters.index(harvester)]
assert len(expected.valid_delta.additions) == len(delta.valid.additions)
for path, plot_info in expected.valid_delta.additions.items():
assert path in delta.valid.additions
plot = harvester.plot_manager.plots.get(Path(path), None)
assert plot is not None
assert plot.prover.get_filename() == delta.valid.additions[path].filename
assert plot.prover.get_size() == delta.valid.additions[path].size
assert plot.prover.get_id() == delta.valid.additions[path].plot_id
assert plot.pool_public_key == delta.valid.additions[path].pool_public_key
assert plot.pool_contract_puzzle_hash == delta.valid.additions[path].pool_contract_puzzle_hash
assert plot.plot_public_key == delta.valid.additions[path].plot_public_key
assert plot.file_size == delta.valid.additions[path].file_size
assert int(plot.time_modified) == delta.valid.additions[path].time_modified
assert_path_list_matches(expected.valid_delta.removals, delta.valid.removals)
assert_path_list_matches(expected.invalid_delta.additions, delta.invalid.additions)
assert_path_list_matches(expected.invalid_delta.removals, delta.invalid.removals)
assert_path_list_matches(expected.keys_missing_delta.additions, delta.keys_missing.additions)
assert_path_list_matches(expected.keys_missing_delta.removals, delta.keys_missing.removals)
assert_path_list_matches(expected.duplicates_delta.additions, delta.duplicates.additions)
assert_path_list_matches(expected.duplicates_delta.removals, delta.duplicates.removals)
expected.valid_delta.clear()
expected.invalid_delta.clear()
expected.keys_missing_delta.clear()
expected.duplicates_delta.clear()
expected.callback_passed = True
async def run_sync_test(self) -> None:
plot_manager: PlotManager
assert len(self.harvesters) == len(self.expected)
last_sync_ids: List[uint64] = []
# Run the test in two steps, first trigger the refresh on both harvesters
for harvester in self.harvesters:
plot_manager = harvester.plot_manager
assert harvester.server is not None
receiver = self.farmer.plot_sync_receivers[harvester.server.node_id]
# Make sure to reset the passed flag always before a new run
self.expected[self.harvesters.index(harvester)].callback_passed = False
receiver._update_callback = self.plot_sync_callback
assert harvester.plot_sync_sender._last_sync_id == receiver._last_sync.sync_id
last_sync_ids.append(harvester.plot_sync_sender._last_sync_id)
plot_manager.start_refreshing()
plot_manager.trigger_refresh()
# Then wait for them to be synced with the farmer and validate them
for harvester in self.harvesters:
plot_manager = harvester.plot_manager
assert harvester.server is not None
receiver = self.farmer.plot_sync_receivers[harvester.server.node_id]
await time_out_assert(20, plot_manager.needs_refresh, value=False)
harvester_index = self.harvesters.index(harvester)
await time_out_assert(
10, synced, True, harvester.plot_sync_sender, receiver, last_sync_ids[harvester_index]
)
expected = self.expected[harvester_index]
assert plot_manager.plot_count() == len(receiver.plots()) == expected.valid_count
assert len(plot_manager.failed_to_open_filenames) == len(receiver.invalid()) == expected.invalid_count
assert len(plot_manager.no_key_filenames) == len(receiver.keys_missing()) == expected.keys_missing_count
assert len(plot_manager.get_duplicates()) == len(receiver.duplicates()) == expected.duplicates_count
assert expected.callback_passed
assert expected.valid_delta.empty()
assert expected.invalid_delta.empty()
assert expected.keys_missing_delta.empty()
assert expected.duplicates_delta.empty()
for path, plot_info in plot_manager.plots.items():
assert str(path) in receiver.plots()
assert plot_info.prover.get_filename() == receiver.plots()[str(path)].filename
assert plot_info.prover.get_size() == receiver.plots()[str(path)].size
assert plot_info.prover.get_id() == receiver.plots()[str(path)].plot_id
assert plot_info.pool_public_key == receiver.plots()[str(path)].pool_public_key
assert plot_info.pool_contract_puzzle_hash == receiver.plots()[str(path)].pool_contract_puzzle_hash
assert plot_info.plot_public_key == receiver.plots()[str(path)].plot_public_key
assert plot_info.file_size == receiver.plots()[str(path)].file_size
assert int(plot_info.time_modified) == receiver.plots()[str(path)].time_modified
for path in plot_manager.failed_to_open_filenames:
assert str(path) in receiver.invalid()
for path in plot_manager.no_key_filenames:
assert str(path) in receiver.keys_missing()
for path in plot_manager.get_duplicates():
assert str(path) in receiver.duplicates()
async def handshake_done(self, index: int) -> bool:
return (
self.harvesters[index].plot_manager._refresh_thread is not None
and len(self.harvesters[index].plot_manager.farmer_public_keys) > 0
)
@pytest_asyncio.fixture(scope="function")
async def environment(
tmp_path: Path, farmer_two_harvester_not_started: Tuple[List[Service[Harvester]], Service[Farmer], BlockTools]
) -> Environment:
def new_test_dir(name: str, plot_list: List[Path]) -> Directory:
return Directory(tmp_path / "plots" / name, plot_list)
plots: List[Path] = get_test_plots()
plots_invalid: List[Path] = get_test_plots()[0:3]
plots_keys_missing: List[Path] = get_test_plots("not_in_keychain")
# Create 4 directories where: dir_n contains n plots
directories: List[Directory] = []
offset: int = 0
while len(directories) < 4:
dir_number = len(directories) + 1
directories.append(new_test_dir(f"{dir_number}", plots[offset : offset + dir_number]))
offset += dir_number
dir_invalid: Directory = new_test_dir("invalid", plots_invalid)
dir_keys_missing: Directory = new_test_dir("keys_missing", plots_keys_missing)
dir_duplicates: Directory = new_test_dir("duplicates", directories[3].plots)
create_default_flax_config(tmp_path)
# Invalidate the plots in `dir_invalid`
for path in dir_invalid.path_list():
with open(path, "wb") as file:
file.write(bytes(100))
harvester_services, farmer_service, bt = farmer_two_harvester_not_started
farmer: Farmer = farmer_service._node
await farmer_service.start()
harvesters: List[Harvester] = [
await start_harvester_service(service, farmer_service) for service in harvester_services
]
for harvester in harvesters:
# Remove default plot directory for this tests
with lock_and_load_config(harvester.root_path, "config.yaml") as config:
config["harvester"]["plot_directories"] = []
save_config(harvester.root_path, "config.yaml", config)
harvester.plot_manager.set_public_keys(
bt.plot_manager.farmer_public_keys.copy(), bt.plot_manager.pool_public_keys.copy()
)
assert len(farmer.plot_sync_receivers) == 2
return Environment(
tmp_path,
harvester_services,
farmer_service,
harvesters,
farmer,
directories[0],
directories[1],
directories[2],
directories[3],
dir_invalid,
dir_keys_missing,
dir_duplicates,
[ExpectedResult() for _ in harvesters],
)
@pytest.mark.asyncio
async def test_sync_valid(environment: Environment) -> None:
env: Environment = environment
env.add_directory(0, env.dir_1)
env.add_directory(1, env.dir_2)
await env.run_sync_test()
# Run again two times to make sure we still get the same results in repeated refresh intervals
env.expected[0].valid_delta.clear()
env.expected[1].valid_delta.clear()
await env.run_sync_test()
await env.run_sync_test()
env.add_directory(0, env.dir_3)
env.add_directory(1, env.dir_4)
await env.run_sync_test()
while len(env.dir_3.path_list()):
drop_plot = env.dir_3.path_list()[0]
drop_plot.unlink()
env.dir_3.drop(drop_plot)
env.expected[0].remove_valid([drop_plot])
await env.run_sync_test()
env.remove_directory(0, env.dir_3)
await env.run_sync_test()
env.remove_directory(1, env.dir_4)
await env.run_sync_test()
env.remove_directory(0, env.dir_1)
env.remove_directory(1, env.dir_2)
await env.run_sync_test()
@pytest.mark.asyncio
async def test_sync_invalid(environment: Environment) -> None:
env: Environment = environment
assert len(env.farmer.plot_sync_receivers) == 2
# Use dir_3 and dir_4 in this test because the invalid plots are copies from dir_1 + dir_2
env.add_directory(0, env.dir_3)
env.add_directory(0, env.dir_invalid, State.invalid)
env.add_directory(1, env.dir_4)
await env.run_sync_test()
# Run again two times to make sure we still get the same results in repeated refresh intervals
await env.run_sync_test()
await env.run_sync_test()
# Drop all but two of the invalid plots
assert len(env.dir_invalid) > 2
for _ in range(len(env.dir_invalid) - 2):
drop_plot = env.dir_invalid.path_list()[0]
drop_plot.unlink()
env.dir_invalid.drop(drop_plot)
env.expected[0].remove_invalid([drop_plot])
await env.run_sync_test()
assert len(env.dir_invalid) == 2
# Add the directory to the first harvester too
env.add_directory(1, env.dir_invalid, State.invalid)
await env.run_sync_test()
# Recover one the remaining invalid plot
for path in get_test_plots():
if path.name == env.dir_invalid.path_list()[0].name:
copy(path, env.dir_invalid.path)
for i in range(len(env.harvesters)):
env.expected[i].add_valid([env.dir_invalid.plot_info_list()[0]])
env.expected[i].remove_invalid([env.dir_invalid.path_list()[0]])
env.harvesters[i].plot_manager.refresh_parameter = replace(
env.harvesters[i].plot_manager.refresh_parameter, retry_invalid_seconds=uint32(0)
)
await env.run_sync_test()
for i in [0, 1]:
remove_plot_directory(env.harvesters[i].root_path, str(env.dir_invalid.path))
env.expected[i].remove_valid([env.dir_invalid.path_list()[0]])
env.expected[i].remove_invalid([env.dir_invalid.path_list()[1]])
await env.run_sync_test()
@pytest.mark.asyncio
async def test_sync_keys_missing(environment: Environment) -> None:
env: Environment = environment
env.add_directory(0, env.dir_1)
env.add_directory(0, env.dir_keys_missing, State.keys_missing)
env.add_directory(1, env.dir_2)
await env.run_sync_test()
# Run again two times to make sure we still get the same results in repeated refresh intervals
await env.run_sync_test()
await env.run_sync_test()
# Drop all but 2 plots with missing keys and test sync inbetween
assert len(env.dir_keys_missing) > 2
for _ in range(len(env.dir_keys_missing) - 2):
drop_plot = env.dir_keys_missing.path_list()[0]
drop_plot.unlink()
env.dir_keys_missing.drop(drop_plot)
env.expected[0].remove_keys_missing([drop_plot])
await env.run_sync_test()
assert len(env.dir_keys_missing) == 2
# Add the plots with missing keys to the other harvester
env.add_directory(0, env.dir_3)
env.add_directory(1, env.dir_keys_missing, State.keys_missing)
await env.run_sync_test()
# Add the missing keys to the first harvester's plot manager
env.harvesters[0].plot_manager.farmer_public_keys.append(G1Element())
env.harvesters[0].plot_manager.pool_public_keys.append(G1Element())
# And validate they become valid now
env.expected[0].add_valid(env.dir_keys_missing.plot_info_list())
env.expected[0].remove_keys_missing(env.dir_keys_missing.path_list())
await env.run_sync_test()
# Drop the valid plots from one harvester and the keys missing plots from the other harvester
env.remove_directory(0, env.dir_keys_missing)
env.remove_directory(1, env.dir_keys_missing, State.keys_missing)
await env.run_sync_test()
@pytest.mark.asyncio
async def test_sync_duplicates(environment: Environment) -> None:
env: Environment = environment
# dir_4 and then dir_duplicates contain the same plots. Load dir_4 first to make sure the plots seen as duplicates
# are from dir_duplicates.
env.add_directory(0, env.dir_4)
await env.run_sync_test()
env.add_directory(0, env.dir_duplicates, State.duplicates)
env.add_directory(1, env.dir_2)
await env.run_sync_test()
# Run again two times to make sure we still get the same results in repeated refresh intervals
await env.run_sync_test()
await env.run_sync_test()
# Drop all but 1 duplicates and test sync in-between
assert len(env.dir_duplicates) > 2
for _ in range(len(env.dir_duplicates) - 2):
drop_plot = env.dir_duplicates.path_list()[0]
drop_plot.unlink()
env.dir_duplicates.drop(drop_plot)
env.expected[0].remove_duplicates([drop_plot])
await env.run_sync_test()
assert len(env.dir_duplicates) == 2
# Removing dir_4 now leads to the plots in dir_duplicates to become loaded instead
env.remove_directory(0, env.dir_4)
env.expected[0].remove_duplicates(env.dir_duplicates.path_list())
env.expected[0].add_valid(env.dir_duplicates.plot_info_list())
await env.run_sync_test()
async def add_and_validate_all_directories(env: Environment) -> None:
# Add all available directories to both harvesters and make sure they load and get synced
env.add_all_directories(0)
env.add_all_directories(1)
await env.run_sync_test()
env.add_directory(0, env.dir_duplicates, State.duplicates)
env.add_directory(1, env.dir_duplicates, State.duplicates)
await env.run_sync_test()
async def remove_and_validate_all_directories(env: Environment) -> None:
# Remove all available directories to both harvesters and make sure they are removed and get synced
env.remove_all_directories(0)
env.remove_all_directories(1)
await env.run_sync_test()
@pytest.mark.asyncio
async def test_add_and_remove_all_directories(environment: Environment) -> None:
await add_and_validate_all_directories(environment)
await remove_and_validate_all_directories(environment)
@pytest.mark.asyncio
async def test_harvester_restart(environment: Environment) -> None:
env: Environment = environment
# Load all directories for both harvesters
await add_and_validate_all_directories(env)
# Stop the harvester and make sure the receiver gets dropped on the farmer and refreshing gets stopped
env.harvester_services[0].stop()
await env.harvester_services[0].wait_closed()
assert len(env.farmer.plot_sync_receivers) == 1
assert not env.harvesters[0].plot_manager._refreshing_enabled
assert not env.harvesters[0].plot_manager.needs_refresh()
# Start the harvester, wait for the handshake and make sure the receiver comes back
await start_harvester_service(env.harvester_services[0], env.farmer_service)
await time_out_assert(5, env.handshake_done, True, 0)
assert len(env.farmer.plot_sync_receivers) == 2
# Remove the duplicates dir to avoid conflicts with the original plots
env.remove_directory(0, env.dir_duplicates)
# Reset the expected data for harvester 0 and re-add all directories because of the restart
env.expected[0] = ExpectedResult()
env.add_all_directories(0)
# Run the refresh two times and make sure everything recovers and stays recovered after harvester restart
await env.run_sync_test()
env.add_directory(0, env.dir_duplicates, State.duplicates)
await env.run_sync_test()
@pytest.mark.asyncio
async def test_farmer_restart(environment: Environment) -> None:
env: Environment = environment
# Load all directories for both harvesters
await add_and_validate_all_directories(env)
last_sync_ids: List[uint64] = []
for i in range(0, len(env.harvesters)):
last_sync_ids.append(env.harvesters[i].plot_sync_sender._last_sync_id)
# Stop the farmer and make sure both receivers get dropped and refreshing gets stopped on the harvesters
env.farmer_service.stop()
await env.farmer_service.wait_closed()
assert len(env.farmer.plot_sync_receivers) == 0
assert not env.harvesters[0].plot_manager._refreshing_enabled
assert not env.harvesters[1].plot_manager._refreshing_enabled
# Start the farmer, wait for the handshake and make sure the receivers come back
await env.farmer_service.start()
await time_out_assert(5, env.handshake_done, True, 0)
await time_out_assert(5, env.handshake_done, True, 1)
assert len(env.farmer.plot_sync_receivers) == 2
# Do not use run_sync_test here, to have a more realistic test scenario just wait for the harvesters to be synced.
# The handshake should trigger re-sync.
for i in range(0, len(env.harvesters)):
harvester: Harvester = env.harvesters[i]
assert harvester.server is not None
receiver = env.farmer.plot_sync_receivers[harvester.server.node_id]
await time_out_assert(20, synced, True, harvester.plot_sync_sender, receiver, last_sync_ids[i])
# Validate the sync
for harvester in env.harvesters:
plot_manager: PlotManager = harvester.plot_manager
assert harvester.server is not None
receiver = env.farmer.plot_sync_receivers[harvester.server.node_id]
expected = env.expected[env.harvesters.index(harvester)]
assert plot_manager.plot_count() == len(receiver.plots()) == expected.valid_count
assert len(plot_manager.failed_to_open_filenames) == len(receiver.invalid()) == expected.invalid_count
assert len(plot_manager.no_key_filenames) == len(receiver.keys_missing()) == expected.keys_missing_count
assert len(plot_manager.get_duplicates()) == len(receiver.duplicates()) == expected.duplicates_count
@pytest.mark.asyncio
async def test_sync_start_and_disconnect_while_sync_is_active(
farmer_one_harvester: Tuple[List[Service[Harvester]], Service[Farmer], BlockTools]
) -> None:
harvesters, farmer_service, _ = farmer_one_harvester
harvester_service = harvesters[0]
harvester = harvester_service._node
farmer: Farmer = farmer_service._node
Constants.message_timeout = 3
async def receiver_available() -> bool:
return harvester.server.node_id in farmer.plot_sync_receivers
async def disconnecting_process(
self: Receiver, method: Callable[[_T_Streamable], Any], message_type: ProtocolMessageTypes, message: Any
) -> None:
if self.current_sync().state == State.loaded:
harvester.plot_manager.trigger_refresh()
await asyncio.sleep(2)
await self.connection().close()
return
await original_process(method, message_type, message)
# Wait for the receiver to show up
await time_out_assert(20, receiver_available)
receiver = farmer.plot_sync_receivers[harvester.server.node_id]
# And wait until the first sync from the harvester to the farmer is done
await time_out_assert(20, receiver.initial_sync, False)
# Replace the `Receiver._process` with `disconnecting_process` which triggers a plot manager refresh and disconnects
# the farmer from the harvester during an active sync.
original_process = receiver._process
receiver._process = functools.partial(disconnecting_process, receiver) # type: ignore[assignment]
# Trigger the refresh which leads to a new sync_start being triggered during the active sync.
harvester.plot_manager.trigger_refresh()
await time_out_assert(20, harvester.plot_sync_sender.sync_active)
# Now wait until the receiver disappears from the farmer's plot_sync_receivers which means its disconnected.
await time_out_assert(20, receiver_available, False)
# Wait until the sync was aborted
await time_out_assert(20, harvester.plot_sync_sender.sync_active, False)
# And then wait for the harvester to reconnect and the receiver to re-appear.
await time_out_assert(20, receiver_available, True)
# Make sure the receiver object has been changed because of the disconnect
assert farmer.plot_sync_receivers[harvester.server.node_id] is not receiver
receiver = farmer.plot_sync_receivers[harvester.server.node_id]
current_last_sync_id = receiver.last_sync().sync_id
# Now start another sync and wait for it to be done to make sure everything still works fine
harvester.plot_manager.trigger_refresh()
await time_out_assert(20, synced, True, harvester.plot_sync_sender, receiver, current_last_sync_id)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/util.py | tests/plot_sync/util.py | from __future__ import annotations
import time
from dataclasses import dataclass
from secrets import token_bytes
from typing import Optional
from flax.farmer.farmer import Farmer
from flax.harvester.harvester import Harvester
from flax.plot_sync.sender import Sender
from flax.protocols.harvester_protocol import PlotSyncIdentifier
from flax.server.start_service import Service
from flax.server.ws_connection import Message, NodeType
from flax.simulator.time_out_assert import time_out_assert
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.ints import uint64
@dataclass
class WSFlaxConnectionDummy:
connection_type: NodeType
peer_node_id: bytes32
peer_host: str = "localhost"
peer_port: int = 0
last_sent_message: Optional[Message] = None
async def send_message(self, message: Message) -> None:
self.last_sent_message = message
def get_dummy_connection(node_type: NodeType, peer_id: Optional[bytes32] = None) -> WSFlaxConnectionDummy:
return WSFlaxConnectionDummy(node_type, bytes32(token_bytes(32)) if peer_id is None else peer_id)
def plot_sync_identifier(current_sync_id: uint64, message_id: uint64) -> PlotSyncIdentifier:
return PlotSyncIdentifier(uint64(int(time.time())), current_sync_id, message_id)
async def start_harvester_service(harvester_service: Service[Harvester], farmer_service: Service[Farmer]) -> Harvester:
# Set the `last_refresh_time` of the plot manager to avoid initial plot loading
harvester: Harvester = harvester_service._node
harvester.plot_manager.last_refresh_time = time.time()
await harvester_service.start()
harvester_service.add_peer(PeerInfo(str(farmer_service.self_hostname), farmer_service._server.get_port()))
harvester.plot_manager.stop_refreshing()
assert harvester.plot_sync_sender._sync_id == 0
assert harvester.plot_sync_sender._next_message_id == 0
assert harvester.plot_sync_sender._last_sync_id == 0
assert harvester.plot_sync_sender._messages == []
def wait_for_farmer_connection(plot_sync_sender: Sender) -> bool:
return plot_sync_sender._connection is not None
await time_out_assert(10, wait_for_farmer_connection, True, harvester.plot_sync_sender)
return harvester
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/test_sender.py | tests/plot_sync/test_sender.py | from __future__ import annotations
import pytest
from flax.plot_sync.exceptions import AlreadyStartedError, InvalidConnectionTypeError
from flax.plot_sync.sender import ExpectedResponse, Sender
from flax.plot_sync.util import Constants
from flax.protocols.harvester_protocol import PlotSyncIdentifier, PlotSyncResponse
from flax.server.ws_connection import NodeType, ProtocolMessageTypes
from flax.simulator.block_tools import BlockTools
from flax.util.ints import int16, uint64
from tests.plot_sync.util import get_dummy_connection, plot_sync_identifier
def test_default_values(bt: BlockTools) -> None:
sender = Sender(bt.plot_manager)
assert sender._plot_manager == bt.plot_manager
assert sender._connection is None
assert sender._sync_id == uint64(0)
assert sender._next_message_id == uint64(0)
assert sender._messages == []
assert sender._last_sync_id == uint64(0)
assert not sender._stop_requested
assert sender._task is None
assert sender._response is None
def test_set_connection_values(bt: BlockTools) -> None:
farmer_connection = get_dummy_connection(NodeType.FARMER)
sender = Sender(bt.plot_manager)
# Test invalid NodeType values
for connection_type in NodeType:
if connection_type != NodeType.FARMER:
pytest.raises(
InvalidConnectionTypeError,
sender.set_connection,
get_dummy_connection(connection_type, farmer_connection.peer_node_id),
)
# Test setting a valid connection works
sender.set_connection(farmer_connection) # type:ignore[arg-type]
assert sender._connection is not None
assert sender._connection == farmer_connection # type: ignore[comparison-overlap]
@pytest.mark.asyncio
async def test_start_stop_send_task(bt: BlockTools) -> None:
sender = Sender(bt.plot_manager)
# Make sure starting/restarting works
for _ in range(2):
assert sender._task is None
await sender.start()
assert sender._task is not None
with pytest.raises(AlreadyStartedError):
await sender.start()
assert not sender._stop_requested
sender.stop()
assert sender._stop_requested
await sender.await_closed()
assert not sender._stop_requested
assert sender._task is None
def test_set_response(bt: BlockTools) -> None:
sender = Sender(bt.plot_manager)
def new_expected_response(sync_id: int, message_id: int, message_type: ProtocolMessageTypes) -> ExpectedResponse:
return ExpectedResponse(message_type, plot_sync_identifier(uint64(sync_id), uint64(message_id)))
def new_response_message(sync_id: int, message_id: int, message_type: ProtocolMessageTypes) -> PlotSyncResponse:
return PlotSyncResponse(
plot_sync_identifier(uint64(sync_id), uint64(message_id)), int16(int(message_type.value)), None
)
response_message = new_response_message(0, 1, ProtocolMessageTypes.plot_sync_start)
assert sender._response is None
# Should trigger unexpected response because `Farmer._response` is `None`
assert not sender.set_response(response_message)
# Set `Farmer._response` and make sure the response gets assigned properly
sender._response = new_expected_response(0, 1, ProtocolMessageTypes.plot_sync_start)
assert sender._response.message is None
assert sender.set_response(response_message)
assert sender._response.message is not None
# Should trigger unexpected response because we already received the message for the currently expected response
assert not sender.set_response(response_message)
# Test expired message
expected_response = new_expected_response(1, 0, ProtocolMessageTypes.plot_sync_start)
sender._response = expected_response
expired_identifier = PlotSyncIdentifier(
uint64(expected_response.identifier.timestamp - Constants.message_timeout - 1),
expected_response.identifier.sync_id,
expected_response.identifier.message_id,
)
expired_message = PlotSyncResponse(expired_identifier, int16(int(ProtocolMessageTypes.plot_sync_start.value)), None)
assert not sender.set_response(expired_message)
# Test invalid sync-id
sender._response = new_expected_response(2, 0, ProtocolMessageTypes.plot_sync_start)
assert not sender.set_response(new_response_message(3, 0, ProtocolMessageTypes.plot_sync_start))
# Test invalid message-id
sender._response = new_expected_response(2, 1, ProtocolMessageTypes.plot_sync_start)
assert not sender.set_response(new_response_message(2, 2, ProtocolMessageTypes.plot_sync_start))
# Test invalid message-type
sender._response = new_expected_response(3, 0, ProtocolMessageTypes.plot_sync_start)
assert not sender.set_response(new_response_message(3, 0, ProtocolMessageTypes.plot_sync_loaded))
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/config.py | tests/plot_sync/config.py | from __future__ import annotations
parallel = True
checkout_blocks_and_plots = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/__init__.py | tests/plot_sync/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/test_delta.py | tests/plot_sync/test_delta.py | from __future__ import annotations
import logging
from typing import List
import pytest
from blspy import G1Element
from flax.plot_sync.delta import Delta, DeltaType, PathListDelta, PlotListDelta
from flax.protocols.harvester_protocol import Plot
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint8, uint64
log = logging.getLogger(__name__)
def dummy_plot(path: str) -> Plot:
return Plot(path, uint8(32), bytes32(b"\00" * 32), G1Element(), None, G1Element(), uint64(0), uint64(0))
@pytest.mark.parametrize(
["delta"],
[
pytest.param(PathListDelta(), id="path list"),
pytest.param(PlotListDelta(), id="plot list"),
],
)
def test_list_delta(delta: DeltaType) -> None:
assert delta.empty()
if type(delta) == PathListDelta:
assert delta.additions == []
elif type(delta) == PlotListDelta:
assert delta.additions == {}
else:
assert False
assert delta.removals == []
assert delta.empty()
if type(delta) == PathListDelta:
delta.additions.append("0")
elif type(delta) == PlotListDelta:
delta.additions["0"] = dummy_plot("0")
else:
assert False, "Invalid delta type"
assert not delta.empty()
delta.removals.append("0")
assert not delta.empty()
delta.additions.clear()
assert not delta.empty()
delta.clear()
assert delta.empty()
@pytest.mark.parametrize(
["old", "new", "result"],
[
[[], [], PathListDelta()],
[["1"], ["0"], PathListDelta(["0"], ["1"])],
[["1", "2", "3"], ["1", "2", "3"], PathListDelta([], [])],
[["2", "1", "3"], ["2", "3", "1"], PathListDelta([], [])],
[["2"], ["2", "3", "1"], PathListDelta(["3", "1"], [])],
[["2"], ["1", "3"], PathListDelta(["1", "3"], ["2"])],
[["1"], ["1", "2", "3"], PathListDelta(["2", "3"], [])],
[[], ["1", "2", "3"], PathListDelta(["1", "2", "3"], [])],
[["-1"], ["1", "2", "3"], PathListDelta(["1", "2", "3"], ["-1"])],
[["-1", "1"], ["2", "3"], PathListDelta(["2", "3"], ["-1", "1"])],
[["-1", "1", "2"], ["2", "3"], PathListDelta(["3"], ["-1", "1"])],
[["-1", "2", "3"], ["2", "3"], PathListDelta([], ["-1"])],
[["-1", "2", "3", "-2"], ["2", "3"], PathListDelta([], ["-1", "-2"])],
[["-2", "2", "3", "-1"], ["2", "3"], PathListDelta([], ["-2", "-1"])],
],
)
def test_path_list_delta_from_lists(old: List[str], new: List[str], result: PathListDelta) -> None:
assert PathListDelta.from_lists(old, new) == result
def test_delta_empty() -> None:
delta: Delta = Delta()
all_deltas: List[DeltaType] = [delta.valid, delta.invalid, delta.keys_missing, delta.duplicates]
assert delta.empty()
for d1 in all_deltas:
delta.valid.additions["0"] = dummy_plot("0")
delta.invalid.additions.append("0")
delta.keys_missing.additions.append("0")
delta.duplicates.additions.append("0")
assert not delta.empty()
for d2 in all_deltas:
if d2 is not d1:
d2.clear()
assert not delta.empty()
assert not delta.empty()
d1.clear()
assert delta.empty()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/plot_sync/test_sync_simulated.py | tests/plot_sync/test_sync_simulated.py | from __future__ import annotations
import asyncio
import functools
import logging
import time
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set, Tuple
import pytest
from blspy import G1Element
from flax.farmer.farmer_api import Farmer
from flax.harvester.harvester_api import Harvester
from flax.plot_sync.receiver import Receiver
from flax.plot_sync.sender import Sender
from flax.plot_sync.util import Constants
from flax.plotting.manager import PlotManager
from flax.plotting.util import PlotInfo
from flax.protocols.harvester_protocol import PlotSyncError, PlotSyncResponse
from flax.server.start_service import Service
from flax.server.ws_connection import ProtocolMessageTypes, WSFlaxConnection, make_msg
from flax.simulator.block_tools import BlockTools
from flax.simulator.time_out_assert import time_out_assert
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.generator_tools import list_to_batches
from flax.util.ints import int16, uint64
from tests.plot_sync.util import start_harvester_service
log = logging.getLogger(__name__)
class ErrorSimulation(Enum):
DropEveryFourthMessage = 1
DropThreeMessages = 2
RespondTooLateEveryFourthMessage = 3
RespondTwice = 4
NonRecoverableError = 5
NotConnected = 6
@dataclass
class TestData:
harvester: Harvester
plot_sync_sender: Sender
plot_sync_receiver: Receiver
event_loop: asyncio.AbstractEventLoop
plots: Dict[Path, PlotInfo] = field(default_factory=dict)
invalid: List[PlotInfo] = field(default_factory=list)
keys_missing: List[PlotInfo] = field(default_factory=list)
duplicates: List[PlotInfo] = field(default_factory=list)
async def run(
self,
*,
loaded: List[PlotInfo],
removed: List[PlotInfo],
invalid: List[PlotInfo],
keys_missing: List[PlotInfo],
duplicates: List[PlotInfo],
initial: bool,
) -> None:
for plot_info in loaded:
assert plot_info.prover.get_filename() not in self.plots
for plot_info in removed:
assert plot_info.prover.get_filename() in self.plots
self.invalid = invalid
self.keys_missing = keys_missing
self.duplicates = duplicates
removed_paths: List[Path] = [p.prover.get_filename() for p in removed] if removed is not None else []
invalid_dict: Dict[Path, int] = {p.prover.get_filename(): 0 for p in self.invalid}
keys_missing_set: Set[Path] = set([p.prover.get_filename() for p in self.keys_missing])
duplicates_set: Set[str] = set([p.prover.get_filename() for p in self.duplicates])
# Inject invalid plots into `PlotManager` of the harvester so that the callback calls below can use them
# to sync them to the farmer.
self.harvester.plot_manager.failed_to_open_filenames = invalid_dict
# Inject key missing plots into `PlotManager` of the harvester so that the callback calls below can use them
# to sync them to the farmer.
self.harvester.plot_manager.no_key_filenames = keys_missing_set
# Inject duplicated plots into `PlotManager` of the harvester so that the callback calls below can use them
# to sync them to the farmer.
for plot_info in loaded:
plot_path = Path(plot_info.prover.get_filename())
self.harvester.plot_manager.plot_filename_paths[plot_path.name] = (str(plot_path.parent), set())
for duplicate in duplicates_set:
plot_path = Path(duplicate)
assert plot_path.name in self.harvester.plot_manager.plot_filename_paths
self.harvester.plot_manager.plot_filename_paths[plot_path.name][1].add(str(plot_path.parent))
batch_size = self.harvester.plot_manager.refresh_parameter.batch_size
# Used to capture the sync id in `run_internal`
sync_id: Optional[uint64] = None
def run_internal() -> None:
nonlocal sync_id
# Simulate one plot manager refresh cycle by calling the methods directly.
self.harvester.plot_sync_sender.sync_start(len(loaded), initial)
sync_id = self.plot_sync_sender._sync_id
if len(loaded) == 0:
self.harvester.plot_sync_sender.process_batch([], 0)
for remaining, batch in list_to_batches(loaded, batch_size):
self.harvester.plot_sync_sender.process_batch(batch, remaining)
self.harvester.plot_sync_sender.sync_done(removed_paths, 0)
await self.event_loop.run_in_executor(None, run_internal)
async def sync_done() -> bool:
assert sync_id is not None
return self.plot_sync_receiver.last_sync().sync_id == self.plot_sync_sender._last_sync_id == sync_id
await time_out_assert(60, sync_done)
for plot_info in loaded:
self.plots[plot_info.prover.get_filename()] = plot_info
for plot_info in removed:
del self.plots[plot_info.prover.get_filename()]
def validate_plot_sync(self) -> None:
assert len(self.plots) == len(self.plot_sync_receiver.plots())
assert len(self.invalid) == len(self.plot_sync_receiver.invalid())
assert len(self.keys_missing) == len(self.plot_sync_receiver.keys_missing())
for _, plot_info in self.plots.items():
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.invalid()
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.keys_missing()
assert plot_info.prover.get_filename() in self.plot_sync_receiver.plots()
synced_plot = self.plot_sync_receiver.plots()[plot_info.prover.get_filename()]
assert plot_info.prover.get_filename() == synced_plot.filename
assert plot_info.pool_public_key == synced_plot.pool_public_key
assert plot_info.pool_contract_puzzle_hash == synced_plot.pool_contract_puzzle_hash
assert plot_info.plot_public_key == synced_plot.plot_public_key
assert plot_info.file_size == synced_plot.file_size
assert uint64(int(plot_info.time_modified)) == synced_plot.time_modified
for plot_info in self.invalid:
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.plots()
assert plot_info.prover.get_filename() in self.plot_sync_receiver.invalid()
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.keys_missing()
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.duplicates()
for plot_info in self.keys_missing:
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.plots()
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.invalid()
assert plot_info.prover.get_filename() in self.plot_sync_receiver.keys_missing()
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.duplicates()
for plot_info in self.duplicates:
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.invalid()
assert plot_info.prover.get_filename() not in self.plot_sync_receiver.keys_missing()
assert plot_info.prover.get_filename() in self.plot_sync_receiver.duplicates()
@dataclass
class TestRunner:
test_data: List[TestData]
def __init__(
self, harvesters: List[Harvester], farmer: Farmer, event_loop: asyncio.events.AbstractEventLoop
) -> None:
self.test_data = []
for harvester in harvesters:
assert harvester.server is not None
self.test_data.append(
TestData(
harvester,
harvester.plot_sync_sender,
farmer.plot_sync_receivers[harvester.server.node_id],
event_loop,
)
)
async def run(
self,
index: int,
*,
loaded: List[PlotInfo],
removed: List[PlotInfo],
invalid: List[PlotInfo],
keys_missing: List[PlotInfo],
duplicates: List[PlotInfo],
initial: bool,
) -> None:
await self.test_data[index].run(
loaded=loaded,
removed=removed,
invalid=invalid,
keys_missing=keys_missing,
duplicates=duplicates,
initial=initial,
)
for data in self.test_data:
data.validate_plot_sync()
async def skip_processing(self: Any, _: WSFlaxConnection, message_type: ProtocolMessageTypes, message: Any) -> bool:
self.message_counter += 1
if self.simulate_error == ErrorSimulation.DropEveryFourthMessage:
if self.message_counter % 4 == 0:
return True
if self.simulate_error == ErrorSimulation.DropThreeMessages:
if 2 < self.message_counter < 6:
return True
if self.simulate_error == ErrorSimulation.RespondTooLateEveryFourthMessage:
if self.message_counter % 4 == 0:
await asyncio.sleep(Constants.message_timeout + 1)
return False
if self.simulate_error == ErrorSimulation.RespondTwice:
await self.connection().send_message(
make_msg(
ProtocolMessageTypes.plot_sync_response,
PlotSyncResponse(message.identifier, int16(message_type.value), None),
)
)
if self.simulate_error == ErrorSimulation.NonRecoverableError and self.message_counter > 1:
await self.connection().send_message(
make_msg(
ProtocolMessageTypes.plot_sync_response,
PlotSyncResponse(
message.identifier, int16(message_type.value), PlotSyncError(int16(0), "non recoverable", None)
),
)
)
self.simulate_error = 0
return True
return False
async def _testable_process(
self: Any, peer: WSFlaxConnection, message_type: ProtocolMessageTypes, message: Any
) -> None:
if await skip_processing(self, peer, message_type, message):
return
await self.original_process(peer, message_type, message)
async def create_test_runner(
harvester_services: List[Service[Harvester]],
farmer_service: Service[Farmer],
event_loop: asyncio.events.AbstractEventLoop,
) -> TestRunner:
await farmer_service.start()
farmer: Farmer = farmer_service._node
assert len(farmer.plot_sync_receivers) == 0
harvesters: List[Harvester] = [
await start_harvester_service(service, farmer_service) for service in harvester_services
]
for receiver in farmer.plot_sync_receivers.values():
receiver.simulate_error = 0 # type: ignore[attr-defined]
receiver.message_counter = 0 # type: ignore[attr-defined]
receiver.original_process = receiver._process # type: ignore[attr-defined]
receiver._process = functools.partial(_testable_process, receiver) # type: ignore[assignment]
return TestRunner(harvesters, farmer, event_loop)
def create_example_plots(count: int) -> List[PlotInfo]:
@dataclass
class DiskProver:
file_name: str
plot_id: bytes32
size: int
def get_filename(self) -> str:
return self.file_name
def get_id(self) -> bytes32:
return self.plot_id
def get_size(self) -> int:
return self.size
return [
PlotInfo(
prover=DiskProver(f"{x}", bytes32(token_bytes(32)), x % 255),
pool_public_key=None,
pool_contract_puzzle_hash=None,
plot_public_key=G1Element(),
file_size=uint64(0),
time_modified=time.time(),
)
for x in range(0, count)
]
@pytest.mark.asyncio
async def test_sync_simulated(
farmer_three_harvester_not_started: Tuple[List[Service[Harvester]], Service[Farmer], BlockTools],
event_loop: asyncio.events.AbstractEventLoop,
) -> None:
harvester_services, farmer_service, _ = farmer_three_harvester_not_started
farmer: Farmer = farmer_service._node
test_runner: TestRunner = await create_test_runner(harvester_services, farmer_service, event_loop)
plots = create_example_plots(31000)
await test_runner.run(
0, loaded=plots[0:10000], removed=[], invalid=[], keys_missing=[], duplicates=plots[0:1000], initial=True
)
await test_runner.run(
1,
loaded=plots[10000:20000],
removed=[],
invalid=plots[30000:30100],
keys_missing=[],
duplicates=[],
initial=True,
)
await test_runner.run(
2,
loaded=plots[20000:30000],
removed=[],
invalid=[],
keys_missing=plots[30100:30200],
duplicates=[],
initial=True,
)
await test_runner.run(
0,
loaded=[],
removed=[],
invalid=plots[30300:30400],
keys_missing=plots[30400:30453],
duplicates=[],
initial=False,
)
await test_runner.run(0, loaded=[], removed=[], invalid=[], keys_missing=[], duplicates=[], initial=False)
await test_runner.run(
0, loaded=[], removed=plots[5000:10000], invalid=[], keys_missing=[], duplicates=[], initial=False
)
await test_runner.run(
1, loaded=[], removed=plots[10000:20000], invalid=[], keys_missing=[], duplicates=[], initial=False
)
await test_runner.run(
2, loaded=[], removed=plots[20000:29000], invalid=[], keys_missing=[], duplicates=[], initial=False
)
await test_runner.run(
0, loaded=[], removed=plots[0:5000], invalid=[], keys_missing=[], duplicates=[], initial=False
)
await test_runner.run(
2,
loaded=plots[5000:10000],
removed=plots[29000:30000],
invalid=plots[30000:30500],
keys_missing=plots[30500:31000],
duplicates=plots[5000:6000],
initial=False,
)
await test_runner.run(
2, loaded=[], removed=plots[5000:10000], invalid=[], keys_missing=[], duplicates=[], initial=False
)
assert len(farmer.plot_sync_receivers) == 3
for plot_sync in farmer.plot_sync_receivers.values():
assert len(plot_sync.plots()) == 0
@pytest.mark.parametrize(
"simulate_error",
[
ErrorSimulation.DropEveryFourthMessage,
ErrorSimulation.DropThreeMessages,
ErrorSimulation.RespondTooLateEveryFourthMessage,
ErrorSimulation.RespondTwice,
],
)
@pytest.mark.asyncio
async def test_farmer_error_simulation(
farmer_one_harvester_not_started: Tuple[List[Service[Harvester]], Service[Farmer], BlockTools],
event_loop: asyncio.events.AbstractEventLoop,
simulate_error: ErrorSimulation,
) -> None:
Constants.message_timeout = 5
harvester_services, farmer_service, _ = farmer_one_harvester_not_started
test_runner: TestRunner = await create_test_runner(harvester_services, farmer_service, event_loop)
batch_size = test_runner.test_data[0].harvester.plot_manager.refresh_parameter.batch_size
plots = create_example_plots(batch_size + 3)
receiver = test_runner.test_data[0].plot_sync_receiver
receiver.simulate_error = simulate_error # type: ignore[attr-defined]
await test_runner.run(
0,
loaded=plots[0 : batch_size + 1],
removed=[],
invalid=[plots[batch_size + 1]],
keys_missing=[plots[batch_size + 2]],
duplicates=[],
initial=True,
)
@pytest.mark.parametrize("simulate_error", [ErrorSimulation.NonRecoverableError, ErrorSimulation.NotConnected])
@pytest.mark.asyncio
async def test_sync_reset_cases(
farmer_one_harvester_not_started: Tuple[List[Service[Harvester]], Service[Farmer], BlockTools],
event_loop: asyncio.events.AbstractEventLoop,
simulate_error: ErrorSimulation,
) -> None:
harvester_services, farmer_service, _ = farmer_one_harvester_not_started
test_runner: TestRunner = await create_test_runner(harvester_services, farmer_service, event_loop)
test_data: TestData = test_runner.test_data[0]
plot_manager: PlotManager = test_data.harvester.plot_manager
plots = create_example_plots(30)
# Inject some data into `PlotManager` of the harvester so that we can validate the reset worked and triggered a
# fresh sync of all available data of the plot manager
for plot_info in plots[0:10]:
test_data.plots[plot_info.prover.get_filename()] = plot_info
plot_manager.plots = test_data.plots
test_data.invalid = plots[10:20]
test_data.keys_missing = plots[20:30]
test_data.plot_sync_receiver.simulate_error = simulate_error # type: ignore[attr-defined]
sender: Sender = test_runner.test_data[0].plot_sync_sender
started_sync_id: uint64 = uint64(0)
plot_manager.failed_to_open_filenames = {p.prover.get_filename(): 0 for p in test_data.invalid}
plot_manager.no_key_filenames = set([p.prover.get_filename() for p in test_data.keys_missing])
async def wait_for_reset() -> bool:
assert started_sync_id != 0
return sender._sync_id != started_sync_id != 0
async def sync_done() -> bool:
assert started_sync_id != 0
return test_data.plot_sync_receiver.last_sync().sync_id == sender._last_sync_id == started_sync_id
# Send start and capture the sync_id
sender.sync_start(len(plots), True)
started_sync_id = sender._sync_id
# Sleep 2 seconds to make sure we have a different sync_id after the reset which gets triggered
await asyncio.sleep(2)
saved_connection = sender._connection
if simulate_error == ErrorSimulation.NotConnected:
sender._connection = None
sender.process_batch(plots, 0)
await time_out_assert(60, wait_for_reset)
started_sync_id = sender._sync_id
sender._connection = saved_connection
await time_out_assert(60, sync_done)
test_runner.test_data[0].validate_plot_sync()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/tools/test_run_block.py | tests/tools/test_run_block.py | from __future__ import annotations
import json
from pathlib import Path
from typing import List
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.condition_with_args import ConditionWithArgs
from tools.run_block import run_json_block
testnet10 = {
"AGG_SIG_ME_ADDITIONAL_DATA": bytes.fromhex("ae83525ba8d1dd3f09b277de18ca3e43fc0af20d20c4b3e92ef2a48bd291ccb2"),
"DIFFICULTY_CONSTANT_FACTOR": 10052721566054,
"DIFFICULTY_STARTING": 30,
"EPOCH_BLOCKS": 768,
"GENESIS_CHALLENGE": bytes.fromhex("ae83525ba8d1dd3f09b277de18ca3e43fc0af20d20c4b3e92ef2a48bd291ccb2"),
"GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bytes.fromhex(
"3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af"
),
"GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bytes.fromhex(
"d23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc"
),
"MEMPOOL_BLOCK_BUFFER": 10,
"MIN_PLOT_SIZE": 18,
}
constants = DEFAULT_CONSTANTS.replace(**testnet10)
retire_bytes = (
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
)
def find_retirement(tocheck: List[ConditionWithArgs]) -> bool:
for c in tocheck:
if c.opcode != ConditionOpcode.CREATE_COIN:
continue
if len(c.vars) < 3:
continue
if c.vars[2] == retire_bytes:
return True
return False
def test_block_no_generator():
dirname = Path(__file__).parent
with open(dirname / "300000.json") as f:
full_block = json.load(f)
cat_list = run_json_block(full_block, dirname, constants)
assert not cat_list
def test_block_retired_cat_with_memo():
dirname = Path(__file__).parent
with open(dirname / "1315630.json") as f:
full_block = json.load(f)
cat_list = run_json_block(full_block, dirname, constants)
assert cat_list
assert cat_list[0].asset_id == "c2808f37e758b713150da4860091dd94a90a781bc4f18377d20de6291b3d506d"
assert cat_list[0].memo == "Hello, please find me, I'm a memo!"
assert cat_list[0].npc.coin_name.hex() == "cc6dca2748865d77eb411e3a44827ad970a0cd8488ad26f6a83842fe4e0e4054"
assert cat_list[0].npc.puzzle_hash.hex() == "c621cd597aa525338d3e4e499a34e0d0b1040304a2f4766b48a368aa57d3ab6f"
found = False
for cond in cat_list[0].npc.conditions:
if cond[0] != ConditionOpcode.CREATE_COIN:
continue
found |= find_retirement(cond[1])
assert found
def test_block_retired_cat_no_memo():
dirname = Path(__file__).parent
with open(dirname / "1315544.json") as f:
full_block = json.load(f)
cat_list = run_json_block(full_block, dirname, constants)
assert cat_list
assert cat_list[0].asset_id == "c2808f37e758b713150da4860091dd94a90a781bc4f18377d20de6291b3d506d"
assert not cat_list[0].memo
assert cat_list[0].npc.coin_name.hex() == "90941ac42b92aad0ed1de5d599d854072fcf1f4bb82cd37e365852f0a730cf5d"
assert cat_list[0].npc.puzzle_hash.hex() == "20a2284ec41cdcc3c54e6b44f8801db2dc28f3aa01c115674b598757d62f09a6"
found = False
for cond in cat_list[0].npc.conditions:
if cond[0] != ConditionOpcode.CREATE_COIN:
continue
found |= find_retirement(cond[1])
assert found
def test_block_cat():
dirname = Path(__file__).parent
with open(dirname / "1315537.json") as f:
full_block = json.load(f)
cat_list = run_json_block(full_block, dirname, constants)
assert cat_list
assert cat_list[0].asset_id == "c2808f37e758b713150da4860091dd94a90a781bc4f18377d20de6291b3d506d"
assert not cat_list[0].memo
assert cat_list[0].npc.coin_name.hex() == "6fb12ab32556537803112badcfaf828bfe1b79eb4181b3adc5d571680295ce6c"
assert cat_list[0].npc.puzzle_hash.hex() == "20a2284ec41cdcc3c54e6b44f8801db2dc28f3aa01c115674b598757d62f09a6"
def test_generator_ref():
"""Run a block containing a back reference without error"""
dirname = Path(__file__).parent
with open(dirname / "466212.json") as f:
full_block = json.load(f)
cat_list = run_json_block(full_block, dirname, constants)
assert cat_list == []
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/tools/config.py | tests/tools/config.py | from __future__ import annotations
parallel = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/tools/test_full_sync.py | tests/tools/test_full_sync.py | #!/usr/bin/env python3
from __future__ import annotations
import asyncio
import os
from pathlib import Path
import pytest
from tools.test_full_sync import run_sync_test
@pytest.mark.parametrize("keep_up", [True, False])
def test_full_sync_test(keep_up: bool):
file_path = os.path.realpath(__file__)
db_file = Path(file_path).parent / "test-blockchain-db.sqlite"
asyncio.run(
run_sync_test(
db_file,
db_version=2,
profile=False,
single_thread=False,
test_constants=False,
keep_up=keep_up,
db_sync="off",
node_profiler=False,
start_at_checkpoint=None,
)
)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/tools/__init__.py | tests/tools/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_bech32m.py | tests/wallet/test_bech32m.py | # Based on this specification from Pieter Wuille:
# https://github.com/sipa/bips/blob/bip-bech32m/bip-bech32m.mediawiki
from __future__ import annotations
from flax.util.bech32m import bech32_decode
def test_valid_imports():
test_strings = [
"A1LQFN3A",
"a1lqfn3a",
"\n a1lqfn3a \n",
"an83characterlonghumanreadablepartthatcontainsthetheexcludedcharactersbioandnumber11sg7hg6",
"abcdef1l7aum6echk45nj3s0wdvt2fg8x9yrzpqzd3ryx",
"11llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllludsr8",
"split1checkupstagehandshakeupstreamerranterredcaperredlc445v",
"?1v759aa",
]
for test_str in test_strings:
hrp, data = bech32_decode(test_str)
assert data is not None
def test_invalid_imports():
test_strings = [
f"{0x20}1xj0phk",
f"{0x7F}1g6xzxy",
f"{0x80}1vctc34",
"an84characterslonghumanreadablepartthatcontainsthetheexcludedcharactersbioandnumber11d6pts4",
"qyrz8wqd2c9m",
"1qyrz8wqd2c9m",
"\n 1qyrz8wqd2c9m \n",
"y1b0jsk6g",
"lt1igcx5c0",
"in1muywd",
"mm1crxm3i",
"au1s5cgom",
"M1VUXWEZ",
"16plkw9",
"1p2gdwpf",
]
for test_str in test_strings:
hrp, data = bech32_decode(test_str)
assert data is None
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet.py | tests/wallet/test_wallet.py | import asyncio
import time
from pathlib import Path
from typing import Any, Dict, List, Tuple
import pytest
from blspy import AugSchemeMPL, G1Element, G2Element
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.protocols.full_node_protocol import RespondBlock
from flax.rpc.wallet_rpc_api import WalletRpcApi
from flax.server.server import FlaxServer
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.bech32m import encode_puzzle_hash
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.derive_keys import master_sk_to_wallet_sk
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.compute_memos import compute_memos
from flax.wallet.util.transaction_type import TransactionType
from flax.wallet.util.wallet_types import AmountWithPuzzlehash
from flax.wallet.wallet_node import WalletNode, get_wallet_db_path
from flax.wallet.wallet_state_manager import WalletStateManager
from flax.simulator.block_tools import BlockTools
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from tests.util.wallet_is_synced import wallet_is_synced
from tests.wallet.cat_wallet.test_cat_wallet import tx_in_pool
class TestWalletSimulator:
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_coinbase(
self,
wallet_node_sim_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 10
full_nodes, wallets, _ = wallet_node_sim_and_wallet
full_node_api = full_nodes[0]
server_1: FlaxServer = full_node_api.full_node.server
wallet_node, server_2 = wallets[0]
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {server_1.node_id.hex(): server_1.node_id.hex()}
else:
wallet_node.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
for i in range(0, num_blocks):
await full_node_api.farm_new_block(FarmNewBlockProtocol(ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks + 2)
]
)
async def check_tx_are_pool_farm_rewards() -> bool:
wsm: WalletStateManager = wallet_node.wallet_state_manager
all_txs = await wsm.get_all_transactions(1)
expected_count = (num_blocks + 1) * 2
if len(all_txs) != expected_count:
return False
pool_rewards = 0
farm_rewards = 0
for tx in all_txs:
if TransactionType(tx.type) == TransactionType.COINBASE_REWARD:
pool_rewards += 1
elif TransactionType(tx.type) == TransactionType.FEE_REWARD:
farm_rewards += 1
if pool_rewards != expected_count / 2:
return False
if farm_rewards != expected_count / 2:
return False
return True
await time_out_assert(20, check_tx_are_pool_farm_rewards, True)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_make_transaction(
self,
two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
server_1 = full_node_api.full_node.server
wallet_node, server_2 = wallets[0]
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {server_1.node_id.hex(): server_1.node_id.hex()}
wallet_node_2.config["trusted_peers"] = {server_1.node_id.hex(): server_1.node_id.hex()}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
await time_out_assert(20, wallet.get_unconfirmed_balance, funds)
tx = await wallet.generate_signed_transaction(
uint64(10),
await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(),
uint64(0),
)
await wallet.push_transaction(tx)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
await time_out_assert(20, wallet.get_unconfirmed_balance, funds - 10)
await time_out_assert(20, full_node_api.full_node.mempool_manager.get_spendbundle, tx.spend_bundle, tx.name)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
new_funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, (2 * num_blocks))
]
)
await time_out_assert(30, wallet.get_confirmed_balance, new_funds - 10)
await time_out_assert(30, wallet.get_unconfirmed_balance, new_funds - 10)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_coinbase_reorg(
self,
wallet_node_sim_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 5
full_nodes, wallets, _ = wallet_node_sim_and_wallet
full_node_api = full_nodes[0]
fn_server = full_node_api.full_node.server
wallet_node, server_2 = wallets[0]
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {fn_server.node_id.hex(): fn_server.node_id.hex()}
else:
wallet_node.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
await asyncio.sleep(5)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(25, wallet.get_confirmed_balance, funds)
await full_node_api.reorg_from_index_to_new_index(
ReorgProtocol(uint32(2), uint32(num_blocks + 6), bytes32(32 * b"0"), None)
)
funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks - 2)
]
)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_send_to_three_peers(
self,
three_sim_two_wallets: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 10
full_nodes, wallets, _ = three_sim_two_wallets
wallet_0, wallet_server_0 = wallets[0]
full_node_api_0 = full_nodes[0]
full_node_api_1 = full_nodes[1]
full_node_api_2 = full_nodes[2]
full_node_0 = full_node_api_0.full_node
full_node_1 = full_node_api_1.full_node
full_node_2 = full_node_api_2.full_node
server_0 = full_node_0.server
server_1 = full_node_1.server
server_2 = full_node_2.server
ph = await wallet_0.wallet_state_manager.main_wallet.get_new_puzzlehash()
if trusted:
wallet_0.config["trusted_peers"] = {
server_0.node_id.hex(): server_0.node_id.hex(),
server_1.node_id.hex(): server_1.node_id.hex(),
server_2.node_id.hex(): server_2.node_id.hex(),
}
else:
wallet_0.config["trusted_peers"] = {}
# wallet0 <-> sever0
await wallet_server_0.start_client(PeerInfo(self_hostname, uint16(server_0._port)), None)
for i in range(0, num_blocks):
await full_node_api_0.farm_new_transaction_block(FarmNewBlockProtocol(ph))
all_blocks = await full_node_api_0.get_all_full_blocks()
for block in all_blocks:
await full_node_1.respond_block(RespondBlock(block))
await full_node_2.respond_block(RespondBlock(block))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(20, wallet_0.wallet_state_manager.main_wallet.get_confirmed_balance, funds)
tx = await wallet_0.wallet_state_manager.main_wallet.generate_signed_transaction(
uint64(10), bytes32(32 * b"0"), uint64(0)
)
assert tx.spend_bundle is not None
await wallet_0.wallet_state_manager.main_wallet.push_transaction(tx)
await time_out_assert_not_none(20, full_node_0.mempool_manager.get_spendbundle, tx.spend_bundle.name())
# wallet0 <-> sever1
await wallet_server_0.start_client(PeerInfo(self_hostname, uint16(server_1._port)), wallet_0.on_connect)
await time_out_assert_not_none(20, full_node_1.mempool_manager.get_spendbundle, tx.spend_bundle.name())
# wallet0 <-> sever2
await wallet_server_0.start_client(PeerInfo(self_hostname, uint16(server_2._port)), wallet_0.on_connect)
await time_out_assert_not_none(20, full_node_2.mempool_manager.get_spendbundle, tx.spend_bundle.name())
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_make_transaction_hop(
self,
two_wallet_nodes_five_freeze: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 10
full_nodes, wallets, _ = two_wallet_nodes_five_freeze
full_node_api_0 = full_nodes[0]
full_node_0 = full_node_api_0.full_node
server_0 = full_node_0.server
wallet_node_0, wallet_0_server = wallets[0]
wallet_node_1, wallet_1_server = wallets[1]
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
ph = await wallet_0.get_new_puzzlehash()
if trusted:
wallet_node_0.config["trusted_peers"] = {server_0.node_id.hex(): server_0.node_id.hex()}
wallet_node_1.config["trusted_peers"] = {server_0.node_id.hex(): server_0.node_id.hex()}
else:
wallet_node_0.config["trusted_peers"] = {}
wallet_node_1.config["trusted_peers"] = {}
await wallet_0_server.start_client(PeerInfo(self_hostname, uint16(server_0._port)), None)
await wallet_1_server.start_client(PeerInfo(self_hostname, uint16(server_0._port)), None)
for i in range(0, num_blocks):
await full_node_api_0.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(90, wallet_is_synced, True, wallet_node_0, full_node_api_0)
await time_out_assert(20, wallet_0.get_confirmed_balance, funds)
await time_out_assert(20, wallet_0.get_unconfirmed_balance, funds)
assert await wallet_0.get_confirmed_balance() == funds
assert await wallet_0.get_unconfirmed_balance() == funds
tx = await wallet_0.generate_signed_transaction(
uint64(10),
await wallet_node_1.wallet_state_manager.main_wallet.get_new_puzzlehash(),
uint64(0),
)
await wallet_0.push_transaction(tx)
await time_out_assert(20, full_node_0.mempool_manager.get_spendbundle, tx.spend_bundle, tx.name)
# Full node height 11, wallet height 9
await time_out_assert(20, wallet_0.get_confirmed_balance, funds)
await time_out_assert(20, wallet_0.get_unconfirmed_balance, funds - 10)
for i in range(0, 4):
await full_node_api_0.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
# here it's num_blocks + 1 because our last reward is included in the first block that we just farmed
new_funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks + 1)
]
)
# Full node height 17, wallet height 15
await time_out_assert(20, wallet_0.get_confirmed_balance, new_funds - 10)
await time_out_assert(20, wallet_0.get_unconfirmed_balance, new_funds - 10)
await time_out_assert(20, wallet_1.get_confirmed_balance, 10)
tx = await wallet_1.generate_signed_transaction(uint64(5), await wallet_0.get_new_puzzlehash(), uint64(0))
await wallet_1.push_transaction(tx)
await time_out_assert(20, full_node_0.mempool_manager.get_spendbundle, tx.spend_bundle, tx.name)
for i in range(0, 4):
await full_node_api_0.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
await wallet_0.get_confirmed_balance()
await wallet_0.get_unconfirmed_balance()
await wallet_1.get_confirmed_balance()
await time_out_assert(20, wallet_0.get_confirmed_balance, new_funds - 5)
await time_out_assert(20, wallet_0.get_unconfirmed_balance, new_funds - 5)
await time_out_assert(20, wallet_1.get_confirmed_balance, 5)
# @pytest.mark.asyncio
# async def test_wallet_finds_full_node(self):
# node_iters = [
# setup_full_node(
# test_constants,
# "blockchain_test.db",
# 11234,
# introducer_port=11236,
# simulator=False,
# ),
# setup_wallet_node(
# 11235,
# test_constants,
# None,
# introducer_port=11236,
# ),
# setup_introducer(11236),
# ]
#
# full_node_api = await node_iters[0].__anext__()
# wallet, wallet_server = await node_iters[1].__anext__()
# introducer, introducer_server = await node_iters[2].__anext__()
#
# async def has_full_node():
# outbound: List[WSFlaxConnection] = wallet.server.get_outgoing_connections()
# for connection in outbound:
# if connection.connection_type is NodeType.FULL_NODE:
# return True
# return False
#
# await time_out_assert(
# 2 * 60,
# has_full_node,
# True,
# )
# await _teardown_nodes(node_iters)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_make_transaction_with_fee(
self,
two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_1 = full_nodes[0]
wallet_node, server_2 = wallets[0]
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {
full_node_1.full_node.server.node_id.hex(): full_node_1.full_node.server.node_id.hex()
}
wallet_node_2.config["trusted_peers"] = {
full_node_1.full_node.server.node_id.hex(): full_node_1.full_node.server.node_id.hex()
}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_1.full_node.server._port)), None)
for i in range(0, num_blocks):
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
await time_out_assert(20, wallet.get_unconfirmed_balance, funds)
assert await wallet.get_confirmed_balance() == funds
assert await wallet.get_unconfirmed_balance() == funds
tx_amount = 3200000000000
tx_fee = 10
tx = await wallet.generate_signed_transaction(
uint64(tx_amount),
await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(),
uint64(tx_fee),
)
assert tx.spend_bundle is not None
fees = tx.spend_bundle.fees()
assert fees == tx_fee
await wallet.push_transaction(tx)
await time_out_assert(20, full_node_1.full_node.mempool_manager.get_spendbundle, tx.spend_bundle, tx.name)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
await time_out_assert(20, wallet.get_unconfirmed_balance, funds - tx_amount - tx_fee)
for i in range(0, num_blocks):
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
new_funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks + 1)
]
)
await time_out_assert(20, wallet.get_confirmed_balance, new_funds - tx_amount - tx_fee)
await time_out_assert(20, wallet.get_unconfirmed_balance, new_funds - tx_amount - tx_fee)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_create_hit_max_send_amount(
self,
two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_1 = full_nodes[0]
wallet_node, server_2 = wallets[0]
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {
full_node_1.full_node.server.node_id.hex(): full_node_1.full_node.server.node_id.hex()
}
wallet_node_2.config["trusted_peers"] = {
full_node_1.full_node.server.node_id.hex(): full_node_1.full_node.server.node_id.hex()
}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_1.full_node.server._port)), None)
for i in range(0, num_blocks):
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
primaries: List[AmountWithPuzzlehash] = []
for i in range(0, 60):
primaries.append({"puzzlehash": ph, "amount": uint64(1000000000 + i), "memos": []})
tx_split_coins = await wallet.generate_signed_transaction(uint64(1), ph, uint64(0), primaries=primaries)
assert tx_split_coins.spend_bundle is not None
await wallet.push_transaction(tx_split_coins)
await time_out_assert(
15, tx_in_pool, True, full_node_1.full_node.mempool_manager, tx_split_coins.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks + 1)
]
)
await time_out_assert(90, wallet.get_confirmed_balance, funds)
max_sent_amount = await wallet.get_max_send_amount()
# 1) Generate transaction that is under the limit
under_limit_tx = None
try:
under_limit_tx = await wallet.generate_signed_transaction(
uint64(max_sent_amount - 1),
ph,
uint64(0),
)
except ValueError:
assert ValueError
assert under_limit_tx is not None
# 2) Generate transaction that is equal to limit
at_limit_tx = None
try:
at_limit_tx = await wallet.generate_signed_transaction(
uint64(max_sent_amount),
ph,
uint64(0),
)
except ValueError:
assert ValueError
assert at_limit_tx is not None
# 3) Generate transaction that is greater than limit
above_limit_tx = None
try:
above_limit_tx = await wallet.generate_signed_transaction(
uint64(max_sent_amount + 1),
ph,
uint64(0),
)
except ValueError:
pass
assert above_limit_tx is None
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_prevent_fee_theft(
self,
two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_1 = full_nodes[0]
wallet_node, server_2 = wallets[0]
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {
full_node_1.full_node.server.node_id.hex(): full_node_1.full_node.server.node_id.hex()
}
wallet_node_2.config["trusted_peers"] = {
full_node_1.full_node.server.node_id.hex(): full_node_1.full_node.server.node_id.hex()
}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_1.full_node.server._port)), None)
for i in range(0, num_blocks):
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
await time_out_assert(20, wallet.get_unconfirmed_balance, funds)
assert await wallet.get_confirmed_balance() == funds
assert await wallet.get_unconfirmed_balance() == funds
tx_amount = 3200000000000
tx_fee = 300000000000
tx = await wallet.generate_signed_transaction(
uint64(tx_amount),
await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(),
uint64(tx_fee),
)
assert tx.spend_bundle is not None
# extract coin_spend from generated spend_bundle
for cs in tx.spend_bundle.coin_spends:
if cs.additions() == []:
stolen_cs = cs
# get a legit signature
stolen_sb = await wallet.sign_transaction([stolen_cs])
now = uint64(int(time.time()))
add_list = list(stolen_sb.additions())
rem_list = list(stolen_sb.removals())
name = stolen_sb.name()
stolen_tx = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=now,
to_puzzle_hash=bytes32(32 * b"0"),
amount=uint64(0),
fee_amount=uint64(stolen_cs.coin.amount),
confirmed=False,
sent=uint32(0),
spend_bundle=stolen_sb,
additions=add_list,
removals=rem_list,
wallet_id=wallet.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=name,
memos=list(compute_memos(stolen_sb).items()),
)
await wallet.push_transaction(stolen_tx)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
await time_out_assert(20, wallet.get_unconfirmed_balance, funds - stolen_cs.coin.amount)
for i in range(0, num_blocks):
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
# Funds have not decreased because stolen_tx was rejected
outstanding_coinbase_rewards = 2000000000000
await time_out_assert(20, wallet.get_confirmed_balance, funds + outstanding_coinbase_rewards)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_tx_reorg(
self,
two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
num_blocks = 5
full_nodes, wallets, _ = two_wallet_nodes
full_node_api = full_nodes[0]
fn_server = full_node_api.full_node.server
wallet_node, server_2 = wallets[0]
wallet_node_2, server_3 = wallets[1]
wallet = wallet_node.wallet_state_manager.main_wallet
wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
ph2 = await wallet_2.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {fn_server.node_id.hex(): fn_server.node_id.hex()}
wallet_node_2.config["trusted_peers"] = {fn_server.node_id.hex(): fn_server.node_id.hex()}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
await server_3.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
# Waits a few seconds to receive rewards
all_blocks = await full_node_api.get_all_full_blocks()
# Ensure that we use a coin that we will not reorg out
coin = list(all_blocks[-3].get_included_reward_coins())[0]
await asyncio.sleep(5)
tx = await wallet.generate_signed_transaction(uint64(1000), ph2, coins={coin})
assert tx.spend_bundle is not None
await wallet.push_transaction(tx)
await full_node_api.full_node.respond_transaction(tx.spend_bundle, tx.name)
await time_out_assert(20, full_node_api.full_node.mempool_manager.get_spendbundle, tx.spend_bundle, tx.name)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
for i in range(0, 2):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
await time_out_assert(20, wallet_2.get_confirmed_balance, 1000)
funds -= 1000
await time_out_assert(20, wallet_node.wallet_state_manager.blockchain.get_finished_sync_up_to, 7)
peak = full_node_api.full_node.blockchain.get_peak()
assert peak is not None
peak_height = peak.height
print(peak_height)
# Perform a reorg, which will revert the transaction in the full node and wallet, and cause wallet to resubmit
await full_node_api.reorg_from_index_to_new_index(
ReorgProtocol(uint32(peak_height - 3), uint32(peak_height + 3), bytes32(32 * b"0"), None)
)
funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, peak_height - 2)
]
)
await time_out_assert(20, full_node_api.full_node.blockchain.get_peak_height, peak_height + 3)
await time_out_assert(20, wallet_node.wallet_state_manager.blockchain.get_finished_sync_up_to, peak_height + 3)
# Farm a few blocks so we can confirm the resubmitted transaction
for i in range(0, num_blocks):
await asyncio.sleep(1)
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(32 * b"0")))
# By this point, the transaction should be confirmed
await time_out_assert(20, wallet.get_confirmed_balance, funds - 1000)
unconfirmed = await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(int(wallet.id()))
assert len(unconfirmed) == 0
tx_record = await wallet_node.wallet_state_manager.tx_store.get_transaction_record(tx.name)
assert tx_record is not None
removed = tx_record.removals[0]
added = tx_record.additions[0]
added_1 = tx_record.additions[1]
wallet_coin_record_rem = await wallet_node.wallet_state_manager.coin_store.get_coin_record(removed.name())
assert wallet_coin_record_rem is not None
assert wallet_coin_record_rem.spent
coin_record_full_node = await full_node_api.full_node.coin_store.get_coin_record(removed.name())
assert coin_record_full_node is not None
assert coin_record_full_node.spent
add_1_coin_record_full_node = await full_node_api.full_node.coin_store.get_coin_record(added.name())
assert add_1_coin_record_full_node is not None
assert add_1_coin_record_full_node.confirmed_block_index > 0
add_2_coin_record_full_node = await full_node_api.full_node.coin_store.get_coin_record(added_1.name())
assert add_2_coin_record_full_node is not None
assert add_2_coin_record_full_node.confirmed_block_index > 0
@pytest.mark.parametrize(
"trusted",
[False],
)
@pytest.mark.asyncio
async def test_address_sliding_window(
self,
wallet_node_100_pk: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, FlaxServer]], BlockTools],
trusted: bool,
self_hostname: str,
) -> None:
full_nodes, wallets, _ = wallet_node_100_pk
full_node_api = full_nodes[0]
server_1: FlaxServer = full_node_api.full_node.server
wallet_node, server_2 = wallets[0]
if trusted:
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_singleton_lifecycle_fast.py | tests/wallet/test_singleton_lifecycle_fast.py | from dataclasses import dataclass
from typing import Any, Callable, Dict, List, Optional, Tuple
from blspy import G1Element, G2Element
from clvm_tools import binutils
from flax.types.blockchain_format.program import Program, SerializedProgram
from flax.types.announcement import Announcement
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_spend import CoinSpend
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.spend_bundle import SpendBundle
from flax.util.ints import uint64
from flax.wallet.puzzles.load_clvm import load_clvm
from tests.clvm.coin_store import BadSpendBundleError, CoinStore, CoinTimestamp
# Puzzle programs compiled from the bundled chialisp sources.
SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
P2_SINGLETON_MOD = load_clvm("p2_singleton_or_delayed_puzhash.clvm")
POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
# Frequently used tree hashes, precomputed once at import time.
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
P2_SINGLETON_MOD_HASH = P2_SINGLETON_MOD.get_tree_hash()
# The trivial puzzle `1` returns its solution as the condition list.
ANYONE_CAN_SPEND_PUZZLE = Program.to(1)
ANYONE_CAN_SPEND_WITH_PADDING_PUZZLE_HASH = Program.to(binutils.assemble("(a (q . 1) 3)")).get_tree_hash()
POOL_REWARD_PREFIX_MAINNET = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000")
# Limits used when applying spend bundles to the local CoinStore.
MAX_BLOCK_COST_CLVM = int(1e18)
COST_PER_BYTE = int(12000)
class PuzzleDB:
    """In-memory lookup table mapping a puzzle's tree hash to its `Program`."""

    def __init__(self):
        self._db: Dict[bytes32, Program] = {}

    def add_puzzle(self, puzzle: Program):
        # Store a round-tripped copy of the puzzle, keyed by its tree hash.
        serialized = bytes(puzzle)
        self._db[puzzle.get_tree_hash()] = Program.from_bytes(serialized)

    def puzzle_for_hash(self, puzzle_hash: bytes32) -> Optional[Program]:
        """Return the stored puzzle for `puzzle_hash`, or None if unknown."""
        if puzzle_hash in self._db:
            return self._db[puzzle_hash]
        return None
def from_kwargs(kwargs, key, type_info=Any):
    """Return `kwargs[key]`, raising `ValueError` if the key is missing.

    `type_info` documents the expected type at the call site, but no type
    check is performed yet -- only presence is validated.  (The original had
    this note as a second, dangling string literal that was not part of the
    docstring; it is folded in here.)
    """
    if key not in kwargs:
        raise ValueError(f"`{key}` missing in call to `solve`")
    return kwargs[key]
# Signature of a puzzle solver: (solver, puzzle_db, curried_args, kwargs) -> solution Program.
Solver_F = Callable[["Solver", PuzzleDB, List[Program], Any], Program]
class Solver:
    """
    This class registers puzzle templates by hash and solves them.
    """
    def __init__(self):
        # Maps a puzzle (or uncurried puzzle template) tree hash to its solver.
        self.solvers_by_puzzle_hash: Dict[bytes32, Solver_F] = {}
    def register_solver(self, puzzle_hash: bytes32, solver_f: Solver_F):
        # Refuse double registration so two solvers can't silently compete.
        if puzzle_hash in self.solvers_by_puzzle_hash:
            raise ValueError(f"solver registered for {puzzle_hash}")
        self.solvers_by_puzzle_hash[puzzle_hash] = solver_f
    def solve(self, puzzle_db: PuzzleDB, puzzle: Program, **kwargs: Any) -> Program:
        """
        The legal values and types for `kwargs` depends on the underlying solver
        that's invoked. The `kwargs` are passed through to any inner solvers
        that may need to be called.
        """
        puzzle_hash = puzzle.get_tree_hash()
        puzzle_args: List[Program] = []
        # If the full puzzle hash isn't registered, fall back to the uncurried
        # template's hash; the solver then receives the curried arguments.
        if puzzle_hash not in self.solvers_by_puzzle_hash:
            puzzle_template, args = puzzle.uncurry()
            puzzle_args = list(args.as_iter())
            puzzle_hash = puzzle_template.get_tree_hash()
        solver_f = self.solvers_by_puzzle_hash.get(puzzle_hash)
        if solver_f:
            return solver_f(self, puzzle_db, puzzle_args, kwargs)
        raise ValueError("can't solve")
def solve_launcher(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict) -> Program:
    """Build the launcher puzzle's solution from the supplied kwargs."""
    amount = from_kwargs(kwargs, "launcher_amount", int)
    destination = from_kwargs(kwargs, "destination_puzzle_hash", bytes32)
    metadata = from_kwargs(kwargs, "metadata", List[Tuple[str, Program]])
    return Program.to([destination, amount, metadata])
def solve_anyone_can_spend(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict) -> Program:
    """
    Solver for the anyone-can-spend puzzle `1`: the solution is simply the
    desired condition list. Farmers can easily steal such a coin, so this
    exists only for testing.
    """
    condition_list = from_kwargs(kwargs, "conditions", List[Program])
    return Program.to(condition_list)
def solve_anyone_can_spend_with_padding(
    solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict
) -> Program:
    """Solver for the padded puzzle `(a (q . 1) 3)` (testing only): the
    conditions ride after a dummy first solution element."""
    condition_list = from_kwargs(kwargs, "conditions", List[Program])
    return Program.to((0, condition_list))
def solve_singleton(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict) -> Program:
    """
    `lineage_proof`: a `Program` that proves the parent is also a singleton (or the launcher).
    `coin_amount`: a necessarily-odd value of mojos in this coin.
    """
    # `args` are the singleton top layer's curried arguments: (SINGLETON_STRUCT, INNER_PUZZLE).
    singleton_struct, inner_puzzle = args
    # Recursively solve the inner puzzle; remaining kwargs pass straight through.
    inner_solution = solver.solve(puzzle_db, inner_puzzle, **kwargs)
    lineage_proof = from_kwargs(kwargs, "lineage_proof", Program)
    coin_amount = from_kwargs(kwargs, "coin_amount", int)
    # NOTE(review): only `inner_solution.rest()` is embedded here -- the inner
    # solvers in this file emit a leading dummy/padding element (e.g. the `0`
    # in `Program.to((0, conditions))`) that the singleton layer must not see.
    # Confirm against the inner solvers before changing this.
    solution = inner_solution.to([lineage_proof, coin_amount, inner_solution.rest()])
    return solution
def solve_pool_member(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict) -> Program:
    """Solve the pool-member inner puzzle for either supported spend type."""
    spend_type = from_kwargs(kwargs, "pool_member_spend_type")
    allowable = ["to-waiting-room", "claim-p2-nft"]
    if spend_type not in allowable:
        raise ValueError("`pool_member_spend_type` must be one of %s for POOL_MEMBER puzzle" % "/".join(allowable))
    if spend_type == "to-waiting-room":
        key_value_list = from_kwargs(kwargs, "key_value_list", List[Tuple[str, Program]])
        return Program.to([0, 1, 0, 0, key_value_list])
    # "claim-p2-nft": absorb a pool reward coin
    reward_amount = from_kwargs(kwargs, "pool_reward_amount", int)
    reward_height = from_kwargs(kwargs, "pool_reward_height", int)
    return Program.to([0, reward_amount, reward_height])
def solve_pool_waiting_room(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict) -> Program:
    """Solve the pool waiting-room inner puzzle.

    `pool_leaving_spend_type` selects the branch:
    - "exit-waiting-room": needs `key_value_list` and `destination_puzzle_hash`
    - "claim-p2-nft": needs `pool_reward_amount` and `pool_reward_height`
    """
    pool_leaving_spend_type = from_kwargs(kwargs, "pool_leaving_spend_type")
    allowable = ["exit-waiting-room", "claim-p2-nft"]
    if pool_leaving_spend_type not in allowable:
        # Bug fix: the message previously said "POOL_MEMBER puzzle" (copy-paste
        # from solve_pool_member); this solver handles the waiting-room puzzle.
        raise ValueError(
            "`pool_leaving_spend_type` must be one of %s for POOL_WAITINGROOM puzzle" % "/".join(allowable)
        )
    exit_waiting_room = pool_leaving_spend_type == "exit-waiting-room"
    if exit_waiting_room:
        key_value_list = from_kwargs(kwargs, "key_value_list", List[Tuple[str, Program]])
        destination_puzzle_hash = from_kwargs(kwargs, "destination_puzzle_hash", int)
        return Program.to([0, 1, key_value_list, destination_puzzle_hash])
    # it's an "absorb_pool_reward" type
    pool_reward_amount = from_kwargs(kwargs, "pool_reward_amount", int)
    pool_reward_height = from_kwargs(kwargs, "pool_reward_height", int)
    return Program.to([0, 0, pool_reward_amount, pool_reward_height])
def solve_p2_singleton(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict) -> Program:
    """Solve the p2_singleton puzzle; only the claim-by-singleton path is implemented."""
    spend_type = from_kwargs(kwargs, "p2_singleton_spend_type")
    allowable = ["claim-p2-nft", "delayed-spend"]
    if spend_type not in allowable:
        raise ValueError("`p2_singleton_spend_type` must be one of %s for P2_SINGLETON puzzle" % "/".join(allowable))
    if spend_type == "claim-p2-nft":
        inner_puzzle_hash = from_kwargs(kwargs, "singleton_inner_puzzle_hash")
        coin_name = from_kwargs(kwargs, "p2_singleton_coin_name")
        return Program.to([inner_puzzle_hash, coin_name])
    raise ValueError("can't solve `delayed-spend` yet")
# Module-level solver registry: one solver per puzzle (or curried template),
# keyed by tree hash.  `solve_puzzle` below dispatches through this.
SOLVER = Solver()
SOLVER.register_solver(LAUNCHER_PUZZLE_HASH, solve_launcher)
SOLVER.register_solver(ANYONE_CAN_SPEND_WITH_PADDING_PUZZLE_HASH, solve_anyone_can_spend_with_padding)
SOLVER.register_solver(SINGLETON_MOD_HASH, solve_singleton)
SOLVER.register_solver(POOL_MEMBER_MOD.get_tree_hash(), solve_pool_member)
SOLVER.register_solver(POOL_WAITINGROOM_MOD.get_tree_hash(), solve_pool_waiting_room)
SOLVER.register_solver(ANYONE_CAN_SPEND_PUZZLE.get_tree_hash(), solve_anyone_can_spend)
SOLVER.register_solver(P2_SINGLETON_MOD_HASH, solve_p2_singleton)
def solve_puzzle(puzzle_db: PuzzleDB, puzzle: Program, **kwargs) -> Program:
    # Convenience wrapper around the module-level SOLVER registry.
    return SOLVER.solve(puzzle_db, puzzle, **kwargs)
@dataclass
class SingletonWallet:
    """Tracks one singleton: its launch info plus the current coin and the
    lineage proof needed to spend it."""
    # Coin id of the launcher coin (the singleton's permanent identity).
    launcher_id: bytes32
    launcher_puzzle_hash: bytes32
    # Metadata list given to the launcher at launch time.
    key_value_list: Program
    # The current (unspent) singleton coin.
    current_state: Coin
    # Lineage proof consumed by the next spend of `current_state`.
    lineage_proof: Program
    def inner_puzzle(self, puzzle_db: PuzzleDB) -> Optional[Program]:
        """Return the current inner puzzle, or None if the full puzzle is unknown."""
        puzzle = puzzle_db.puzzle_for_hash(self.current_state.puzzle_hash)
        if puzzle is None:
            return None
        return self.inner_puzzle_for_puzzle(puzzle)
    def inner_puzzle_for_puzzle(self, puzzle: Program) -> Optional[Program]:
        """Extract the inner puzzle from a full singleton puzzle reveal."""
        assert puzzle.get_tree_hash() == self.current_state.puzzle_hash
        # NOTE(review): this None check is dead -- the assert above would have
        # raised already if `puzzle` were None.
        if puzzle is None:
            return puzzle
        template, args = puzzle.uncurry()
        assert bytes(template) == bytes(SINGLETON_MOD)
        singleton_struct, inner_puzzle = list(args.as_iter())
        return inner_puzzle
    def coin_spend_for_conditions(self, puzzle_db: PuzzleDB, **kwargs) -> CoinSpend:
        """Build a spend of the current singleton coin; `kwargs` are forwarded
        to the registered solver (e.g. `conditions=...` or pool spend args)."""
        coin = self.current_state
        puzzle_reveal = puzzle_db.puzzle_for_hash(coin.puzzle_hash)
        assert puzzle_reveal is not None
        solution = solve_puzzle(
            puzzle_db, puzzle_reveal, lineage_proof=self.lineage_proof, coin_amount=coin.amount, **kwargs
        )
        return CoinSpend(coin, puzzle_reveal, solution)
    def update_state(self, puzzle_db: PuzzleDB, removals: List[CoinSpend]) -> int:
        """Advance `current_state`/`lineage_proof` if `removals` spent our coin.

        Returns the number of state changes applied.  The odd-amount child of
        a spend of the current coin is taken to be the new singleton.
        """
        state_change_count = 0
        current_coin_name = self.current_state.name()
        for coin_spend in removals:
            if coin_spend.coin.name() == current_coin_name:
                for coin in coin_spend.additions():
                    # Singleton outputs carry an odd amount (see solve_singleton).
                    if coin.amount & 1 == 1:
                        parent_puzzle_hash = coin_spend.coin.puzzle_hash
                        parent_puzzle = puzzle_db.puzzle_for_hash(parent_puzzle_hash)
                        assert parent_puzzle is not None
                        parent_inner_puzzle = self.inner_puzzle_for_puzzle(parent_puzzle)
                        assert parent_inner_puzzle is not None
                        parent_inner_puzzle_hash = parent_inner_puzzle.get_tree_hash()
                        # The child proves lineage with (parent's parent id,
                        # parent inner puzzle hash, child amount).
                        lineage_proof = Program.to(
                            [self.current_state.parent_coin_info, parent_inner_puzzle_hash, coin.amount]
                        )
                        self.lineage_proof = lineage_proof
                        self.current_state = coin
                        state_change_count += 1
        return state_change_count
def adaptor_for_singleton_inner_puzzle(puzzle: Program) -> Program:
    """
    The singleton passes its inner puzzle a first "truths" argument containing
    values the singleton guarantees are correct. A puzzle written as an
    ordinary outer puzzle does not expect that argument; wrapping it as
    `(a (q . <puzzle>) 3)` drops the truths and slides the remaining arguments
    over, letting any such puzzle serve as a singleton inner puzzle.
    """
    # this is pretty slow and lame
    disassembled = binutils.disassemble(puzzle)
    return Program.to(binutils.assemble("(a (q . %s) 3)" % disassembled))
def launcher_conditions_and_spend_bundle(
    puzzle_db: PuzzleDB,
    parent_coin_id: bytes32,
    launcher_amount: uint64,
    initial_singleton_inner_puzzle: Program,
    metadata: List[Tuple[str, str]],
    launcher_puzzle: Program,
) -> Tuple[bytes32, List[Program], SpendBundle]:
    """Prepare a singleton launch.

    Returns (launcher_id, conditions the parent spend must emit, the
    launcher's own spend bundle).  Both the launcher and the eve singleton
    puzzle are registered with `puzzle_db`.
    """
    puzzle_db.add_puzzle(launcher_puzzle)
    launcher_puzzle_hash = launcher_puzzle.get_tree_hash()
    launcher_coin = Coin(parent_coin_id, launcher_puzzle_hash, launcher_amount)
    singleton_full_puzzle = singleton_puzzle(launcher_coin.name(), launcher_puzzle_hash, initial_singleton_inner_puzzle)
    puzzle_db.add_puzzle(singleton_full_puzzle)
    singleton_full_puzzle_hash = singleton_full_puzzle.get_tree_hash()
    # The launcher announces the hash of (singleton_puzzle_hash, amount,
    # metadata); the parent must assert that announcement and create the
    # launcher coin, tying the two spends together.
    message_program = Program.to([singleton_full_puzzle_hash, launcher_amount, metadata])
    expected_announcement = Announcement(launcher_coin.name(), message_program.get_tree_hash())
    expected_conditions = []
    expected_conditions.append(
        Program.to(
            binutils.assemble(f"(0x{ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT.hex()} 0x{expected_announcement.name()})")
        )
    )
    expected_conditions.append(
        Program.to(
            binutils.assemble(f"(0x{ConditionOpcode.CREATE_COIN.hex()} 0x{launcher_puzzle_hash} {launcher_amount})")
        )
    )
    solution = solve_puzzle(
        puzzle_db,
        launcher_puzzle,
        destination_puzzle_hash=singleton_full_puzzle_hash,
        launcher_amount=launcher_amount,
        metadata=metadata,
    )
    coin_spend = CoinSpend(launcher_coin, SerializedProgram.from_program(launcher_puzzle), solution)
    spend_bundle = SpendBundle([coin_spend], G2Element())
    return launcher_coin.name(), expected_conditions, spend_bundle
def singleton_puzzle(launcher_id: bytes32, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> Program:
    # Curry the singleton struct (mod_hash, (launcher_id, launcher_puzzle_hash))
    # and the inner puzzle into the singleton top layer.
    return SINGLETON_MOD.curry((SINGLETON_MOD_HASH, (launcher_id, launcher_puzzle_hash)), inner_puzzle)
def singleton_puzzle_hash(launcher_id: bytes32, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> bytes32:
    # Tree hash of the fully-curried singleton puzzle.
    return singleton_puzzle(launcher_id, launcher_puzzle_hash, inner_puzzle).get_tree_hash()
def solution_for_singleton_puzzle(lineage_proof: Program, my_amount: int, inner_solution: Program) -> Program:
    # Singleton top-layer solution shape: (lineage_proof my_amount inner_solution).
    return Program.to([lineage_proof, my_amount, inner_solution])
def p2_singleton_puzzle_for_launcher(
    puzzle_db: PuzzleDB,
    launcher_id: Program,
    launcher_puzzle_hash: bytes32,
    seconds_delay: int,
    delayed_puzzle_hash: bytes32,
) -> Program:
    """Curry a `p2_singleton_or_delayed_puzhash` puzzle for the given launcher
    and register it with `puzzle_db` before returning it."""
    curried = P2_SINGLETON_MOD.curry(
        SINGLETON_MOD_HASH, launcher_id, launcher_puzzle_hash, seconds_delay, delayed_puzzle_hash
    )
    puzzle_db.add_puzzle(curried)
    return curried
def p2_singleton_puzzle_hash_for_launcher(
    puzzle_db: PuzzleDB,
    launcher_id: Program,
    launcher_puzzle_hash: bytes32,
    seconds_delay: int,
    delayed_puzzle_hash: bytes32,
) -> bytes32:
    """Like `p2_singleton_puzzle_for_launcher`, but return just the tree hash."""
    puzzle = p2_singleton_puzzle_for_launcher(
        puzzle_db, launcher_id, launcher_puzzle_hash, seconds_delay, delayed_puzzle_hash
    )
    return puzzle.get_tree_hash()
def claim_p2_singleton(
    puzzle_db: PuzzleDB, singleton_wallet: SingletonWallet, p2_singleton_coin: Coin
) -> Tuple[CoinSpend, List[Program]]:
    """Build the p2_singleton claim spend, plus the conditions the singleton
    itself must emit so the two spends' announcements line up."""
    inner_puzzle = singleton_wallet.inner_puzzle(puzzle_db)
    assert inner_puzzle
    inner_puzzle_hash = inner_puzzle.get_tree_hash()
    p2_singleton_puzzle = puzzle_db.puzzle_for_hash(p2_singleton_coin.puzzle_hash)
    assert p2_singleton_puzzle is not None
    p2_singleton_coin_name = p2_singleton_coin.name()
    p2_singleton_solution = solve_puzzle(
        puzzle_db,
        p2_singleton_puzzle,
        p2_singleton_spend_type="claim-p2-nft",
        singleton_inner_puzzle_hash=inner_puzzle_hash,
        p2_singleton_coin_name=p2_singleton_coin_name,
    )
    p2_singleton_coin_spend = CoinSpend(
        p2_singleton_coin,
        p2_singleton_puzzle.to_serialized_program(),
        p2_singleton_solution,
    )
    # The p2_singleton coin announces b"$"; the singleton must assert that
    # announcement, announce the coin name back, and re-create itself with
    # the same inner puzzle (amount 1).
    expected_p2_singleton_announcement = Announcement(p2_singleton_coin_name, bytes(b"$")).name()
    singleton_conditions = [
        Program.to([ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, p2_singleton_coin_name]),
        Program.to([ConditionOpcode.CREATE_COIN, inner_puzzle_hash, 1]),
        Program.to([ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, expected_p2_singleton_announcement]),
    ]
    return p2_singleton_coin_spend, singleton_conditions
def lineage_proof_for_coin_spend(coin_spend: CoinSpend) -> Program:
    """Take a coin spend, return a lineage proof for its child to use in spends."""
    coin = coin_spend.coin
    parent_name = coin.parent_coin_info
    amount = coin.amount
    if coin.puzzle_hash == LAUNCHER_PUZZLE_HASH:
        # Launcher parent: the proof is just (parent_id, amount).
        return Program.to([parent_name, amount])
    # Otherwise the parent is a singleton; recover its inner puzzle from the
    # puzzle reveal so the proof can carry the inner puzzle hash.
    full_puzzle = Program.from_bytes(bytes(coin_spend.puzzle_reveal))
    _, args = full_puzzle.uncurry()
    # NOTE(review): expects exactly four curried values with the inner puzzle
    # last -- confirm against the singleton mod if the curry layout changes.
    _, __, ___, inner_puzzle = list(args.as_iter())
    inner_puzzle_hash = inner_puzzle.get_tree_hash()
    return Program.to([parent_name, inner_puzzle_hash, amount])
def create_throwaway_pubkey(seed: bytes) -> G1Element:
    # `seed` is currently ignored: every call returns the group generator, so
    # all "throwaway" keys produced by these tests are identical.
    return G1Element.generator()
def assert_coin_spent(coin_store: CoinStore, coin: Coin, is_spent=True):
    """Assert that `coin` exists in `coin_store` with the given spent status."""
    record = coin_store.coin_record(coin.name())
    assert record is not None
    assert record.spent is is_spent
def spend_coin_to_singleton(
    puzzle_db: PuzzleDB, launcher_puzzle: Program, coin_store: CoinStore, now: CoinTimestamp
) -> Tuple[List[Coin], List[CoinSpend]]:
    # Farm a coin and spend it into a new singleton via the launcher.
    # Returns the (additions, removals) from applying the combined spend bundle.
    farmed_coin_amount = 100000
    metadata = [("foo", "bar")]
    # NOTE(review): both `now` and `launcher_puzzle` parameters are shadowed
    # by the local reassignments below, so the caller's arguments are never
    # used (and the caller's `now` is never advanced).  Confirm whether this
    # is intentional before "fixing" -- it changes the test's timestamps.
    now = CoinTimestamp(10012300, 1)
    farmed_coin = coin_store.farm_coin(ANYONE_CAN_SPEND_PUZZLE.get_tree_hash(), now, amount=farmed_coin_amount)
    now.seconds += 500
    now.height += 1
    launcher_amount: uint64 = uint64(1)
    launcher_puzzle = LAUNCHER_PUZZLE
    launcher_puzzle_hash = launcher_puzzle.get_tree_hash()
    initial_singleton_puzzle = adaptor_for_singleton_inner_puzzle(ANYONE_CAN_SPEND_PUZZLE)
    launcher_id, condition_list, launcher_spend_bundle = launcher_conditions_and_spend_bundle(
        puzzle_db, farmed_coin.name(), launcher_amount, initial_singleton_puzzle, metadata, launcher_puzzle
    )
    # Spend the farmed coin with the conditions that commit to the launcher.
    conditions = Program.to(condition_list)
    coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions)
    spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())])
    additions, removals = coin_store.update_coin_store_for_spend_bundle(
        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
    )
    launcher_coin = launcher_spend_bundle.coin_spends[0].coin
    assert_coin_spent(coin_store, launcher_coin)
    assert_coin_spent(coin_store, farmed_coin)
    # The eve singleton should now exist and still be unspent.
    singleton_expected_puzzle = singleton_puzzle(launcher_id, launcher_puzzle_hash, initial_singleton_puzzle)
    singleton_expected_puzzle_hash = singleton_expected_puzzle.get_tree_hash()
    expected_singleton_coin = Coin(launcher_coin.name(), singleton_expected_puzzle_hash, launcher_amount)
    assert_coin_spent(coin_store, expected_singleton_coin, is_spent=False)
    return additions, removals
def find_interesting_singletons(puzzle_db: PuzzleDB, removals: List[CoinSpend]) -> List[SingletonWallet]:
    """Build a `SingletonWallet` for every launcher spend found in `removals`."""
    found: List[SingletonWallet] = []
    for spend in removals:
        if spend.coin.puzzle_hash != LAUNCHER_PUZZLE_HASH:
            continue
        # The launcher solution is (destination_puzzle_hash amount metadata);
        # the third element is the key/value metadata list.
        solution = Program.from_bytes(bytes(spend.solution))
        metadata = solution.rest().rest().first()
        found.append(
            SingletonWallet(
                spend.coin.name(),
                spend.coin.puzzle_hash,
                metadata,
                spend.additions()[0],
                lineage_proof_for_coin_spend(spend),
            )
        )
    return found
def filter_p2_singleton(puzzle_db: PuzzleDB, singleton_wallet: SingletonWallet, additions: List[Coin]) -> List[Coin]:
    """Return the coins in `additions` whose (known) puzzles are p2_singleton puzzles."""
    matches: List[Coin] = []
    for candidate in additions:
        puzzle = puzzle_db.puzzle_for_hash(candidate.puzzle_hash)
        if puzzle is None:
            # Unknown puzzle: can't classify, skip.
            continue
        template, _args = puzzle.uncurry()
        if template.get_tree_hash() == P2_SINGLETON_MOD_HASH:
            matches.append(candidate)
    return matches
def test_lifecycle_with_coinstore_as_wallet():
    """Walk one singleton through its full lifecycle against a local CoinStore:

    launch -> claim p2_singleton rewards (twice) -> join a pool -> claim a
    reward for the pool -> enter the waiting room -> exit after the relative
    lock height -> melt with the `-113` CREATE_COIN hack.
    """
    PUZZLE_DB = PuzzleDB()
    interested_singletons = []
    #######
    # farm a coin
    coin_store = CoinStore(int.from_bytes(POOL_REWARD_PREFIX_MAINNET, "big"))
    now = CoinTimestamp(10012300, 1)
    DELAY_SECONDS = 86400
    DELAY_PUZZLE_HASH = bytes([0] * 32)
    #######
    # spend coin to a singleton
    additions, removals = spend_coin_to_singleton(PUZZLE_DB, LAUNCHER_PUZZLE, coin_store, now)
    assert len(list(coin_store.all_unspent_coins())) == 1
    new_singletons = find_interesting_singletons(PUZZLE_DB, removals)
    interested_singletons.extend(new_singletons)
    assert len(interested_singletons) == 1
    SINGLETON_WALLET = interested_singletons[0]
    #######
    # farm a `p2_singleton`
    pool_reward_puzzle_hash = p2_singleton_puzzle_hash_for_launcher(
        PUZZLE_DB, SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, DELAY_SECONDS, DELAY_PUZZLE_HASH
    )
    farmed_coin = coin_store.farm_coin(pool_reward_puzzle_hash, now)
    now.seconds += 500
    now.height += 1
    p2_singleton_coins = filter_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, [farmed_coin])
    assert p2_singleton_coins == [farmed_coin]
    assert len(list(coin_store.all_unspent_coins())) == 2
    #######
    # now collect the `p2_singleton` using the singleton
    for coin in p2_singleton_coins:
        p2_singleton_coin_spend, singleton_conditions = claim_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, coin)
        coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=singleton_conditions)
        spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
        additions, removals = coin_store.update_coin_store_for_spend_bundle(
            spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
        )
        now.seconds += 500
        now.height += 1
        SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert len(list(coin_store.all_unspent_coins())) == 1
    #######
    # farm and collect another `p2_singleton`
    pool_reward_puzzle_hash = p2_singleton_puzzle_hash_for_launcher(
        PUZZLE_DB, SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, DELAY_SECONDS, DELAY_PUZZLE_HASH
    )
    farmed_coin = coin_store.farm_coin(pool_reward_puzzle_hash, now)
    now.seconds += 500
    now.height += 1
    p2_singleton_coins = filter_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, [farmed_coin])
    assert p2_singleton_coins == [farmed_coin]
    assert len(list(coin_store.all_unspent_coins())) == 2
    for coin in p2_singleton_coins:
        p2_singleton_coin_spend, singleton_conditions = claim_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, coin)
        coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=singleton_conditions)
        spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
        additions, removals = coin_store.update_coin_store_for_spend_bundle(
            spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
        )
        now.seconds += 500
        now.height += 1
        SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert len(list(coin_store.all_unspent_coins())) == 1
    #######
    # loan the singleton to a pool
    # puzzle_for_loan_singleton_to_pool(
    # pool_puzzle_hash, p2_singleton_puzzle_hash, owner_public_key, pool_reward_prefix, relative_lock_height)
    # calculate the series
    owner_public_key = bytes(create_throwaway_pubkey(b"foo"))
    pool_puzzle_hash = Program.to(bytes(create_throwaway_pubkey(b""))).get_tree_hash()
    pool_reward_prefix = POOL_REWARD_PREFIX_MAINNET
    relative_lock_height = 1440
    pool_escaping_puzzle = POOL_WAITINGROOM_MOD.curry(
        pool_puzzle_hash, pool_reward_puzzle_hash, owner_public_key, pool_reward_prefix, relative_lock_height
    )
    pool_escaping_puzzle_hash = pool_escaping_puzzle.get_tree_hash()
    pool_member_puzzle = POOL_MEMBER_MOD.curry(
        pool_puzzle_hash,
        pool_reward_puzzle_hash,
        owner_public_key,
        pool_reward_prefix,
        pool_escaping_puzzle_hash,
    )
    pool_member_puzzle_hash = pool_member_puzzle.get_tree_hash()
    # Register both inner puzzles and their full singleton wrappings so the
    # solver and filter helpers can find them later.
    PUZZLE_DB.add_puzzle(pool_escaping_puzzle)
    PUZZLE_DB.add_puzzle(
        singleton_puzzle(SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, pool_escaping_puzzle)
    )
    PUZZLE_DB.add_puzzle(pool_member_puzzle)
    full_puzzle = singleton_puzzle(
        SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, pool_member_puzzle
    )
    PUZZLE_DB.add_puzzle(full_puzzle)
    conditions = [Program.to([ConditionOpcode.CREATE_COIN, pool_member_puzzle_hash, 1])]
    singleton_coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(PUZZLE_DB, conditions=conditions)
    spend_bundle = SpendBundle([singleton_coin_spend], G2Element())
    additions, removals = coin_store.update_coin_store_for_spend_bundle(
        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
    )
    assert len(list(coin_store.all_unspent_coins())) == 1
    SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    #######
    # farm a `p2_singleton`
    pool_reward_puzzle_hash = p2_singleton_puzzle_hash_for_launcher(
        PUZZLE_DB, SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, DELAY_SECONDS, DELAY_PUZZLE_HASH
    )
    farmed_coin = coin_store.farm_coin(pool_reward_puzzle_hash, now)
    now.seconds += 500
    now.height += 1
    p2_singleton_coins = filter_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, [farmed_coin])
    assert p2_singleton_coins == [farmed_coin]
    assert len(list(coin_store.all_unspent_coins())) == 2
    #######
    # now collect the `p2_singleton` for the pool
    for coin in p2_singleton_coins:
        p2_singleton_coin_spend, singleton_conditions = claim_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, coin)
        coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(
            PUZZLE_DB,
            pool_member_spend_type="claim-p2-nft",
            pool_reward_amount=p2_singleton_coin_spend.coin.amount,
            pool_reward_height=now.height - 1,
        )
        spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
        spend_bundle.debug()
        additions, removals = coin_store.update_coin_store_for_spend_bundle(
            spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
        )
        now.seconds += 500
        now.height += 1
        SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert len(list(coin_store.all_unspent_coins())) == 2
    #######
    # spend the singleton into the "leaving the pool" state
    coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(
        PUZZLE_DB, pool_member_spend_type="to-waiting-room", key_value_list=Program.to([("foo", "bar")])
    )
    spend_bundle = SpendBundle([coin_spend], G2Element())
    additions, removals = coin_store.update_coin_store_for_spend_bundle(
        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
    )
    now.seconds += 500
    now.height += 1
    change_count = SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert change_count == 1
    assert len(list(coin_store.all_unspent_coins())) == 2
    #######
    # farm a `p2_singleton`
    pool_reward_puzzle_hash = p2_singleton_puzzle_hash_for_launcher(
        PUZZLE_DB, SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, DELAY_SECONDS, DELAY_PUZZLE_HASH
    )
    farmed_coin = coin_store.farm_coin(pool_reward_puzzle_hash, now)
    now.seconds += 500
    now.height += 1
    p2_singleton_coins = filter_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, [farmed_coin])
    assert p2_singleton_coins == [farmed_coin]
    assert len(list(coin_store.all_unspent_coins())) == 3
    #######
    # now collect the `p2_singleton` for the pool
    for coin in p2_singleton_coins:
        p2_singleton_coin_spend, singleton_conditions = claim_p2_singleton(PUZZLE_DB, SINGLETON_WALLET, coin)
        coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(
            PUZZLE_DB,
            pool_leaving_spend_type="claim-p2-nft",
            pool_reward_amount=p2_singleton_coin_spend.coin.amount,
            pool_reward_height=now.height - 1,
        )
        spend_bundle = SpendBundle([coin_spend, p2_singleton_coin_spend], G2Element())
        additions, removals = coin_store.update_coin_store_for_spend_bundle(
            spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
        )
        now.seconds += 500
        now.height += 1
        SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert len(list(coin_store.all_unspent_coins())) == 3
    #######
    # now finish leaving the pool
    initial_singleton_puzzle = adaptor_for_singleton_inner_puzzle(ANYONE_CAN_SPEND_PUZZLE)
    coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(
        PUZZLE_DB,
        pool_leaving_spend_type="exit-waiting-room",
        key_value_list=[("foo1", "bar2"), ("foo2", "baz5")],
        destination_puzzle_hash=initial_singleton_puzzle.get_tree_hash(),
    )
    spend_bundle = SpendBundle([coin_spend], G2Element())
    full_puzzle = singleton_puzzle(
        SINGLETON_WALLET.launcher_id, SINGLETON_WALLET.launcher_puzzle_hash, initial_singleton_puzzle
    )
    PUZZLE_DB.add_puzzle(full_puzzle)
    # Exiting before the relative lock height (1440) has elapsed must fail...
    try:
        additions, removals = coin_store.update_coin_store_for_spend_bundle(
            spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
        )
        assert 0
    except BadSpendBundleError as ex:
        assert ex.args[0] == "condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"
    # ...and succeed once enough height has passed.
    now.seconds += 350000
    now.height += 1445
    additions, removals = coin_store.update_coin_store_for_spend_bundle(
        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
    )
    SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert len(list(coin_store.all_unspent_coins())) == 3
    #######
    # now spend to oblivion with the `-113` hack
    coin_spend = SINGLETON_WALLET.coin_spend_for_conditions(
        PUZZLE_DB, conditions=[[ConditionOpcode.CREATE_COIN, 0, -113]]
    )
    spend_bundle = SpendBundle([coin_spend], G2Element())
    spend_bundle.debug()
    additions, removals = coin_store.update_coin_store_for_spend_bundle(
        spend_bundle, now, MAX_BLOCK_COST_CLVM, COST_PER_BYTE
    )
    # No odd-amount child remains, so the wallet state does not advance.
    update_count = SINGLETON_WALLET.update_state(PUZZLE_DB, removals)
    assert update_count == 0
    assert len(list(coin_store.all_unspent_coins())) == 2
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_taproot.py | tests/wallet/test_taproot.py | from __future__ import annotations
from flax.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE,
calculate_synthetic_offset,
calculate_synthetic_public_key,
)
from tests.core.make_block_generator import int_to_public_key
class TestTaproot:
    def test_1(self):
        # The synthetic public key computed directly must equal the main key
        # plus the public key of the synthetic offset, across a range of keys.
        for secret_exponent in range(500, 600):
            hidden_hash = DEFAULT_HIDDEN_PUZZLE.get_tree_hash()
            pubkey = int_to_public_key(secret_exponent)
            offset = calculate_synthetic_offset(pubkey, hidden_hash)
            combined = pubkey + int_to_public_key(offset)
            synthetic = calculate_synthetic_public_key(pubkey, hidden_hash)
            assert combined == synthetic
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_flaxlisp.py | tests/wallet/test_flaxlisp.py | from __future__ import annotations
import pytest
from flax.wallet.flaxlisp import (
apply,
args,
cons,
eval,
fail,
first,
is_zero,
make_if,
make_list,
nth,
quote,
rest,
sexp,
)
class TestFlaxlisp:
    """Tests for the flaxlisp helpers, which build CLVM source text as strings."""
    def test_sexp(self):
        assert sexp() == "()"
        assert sexp(1) == "(1)"
        assert sexp(1, 2) == "(1 2)"
    def test_cons(self):
        assert cons(1, 2) == "(c 1 2)"
    def test_first(self):
        assert first("(1)") == "(f (1))"
    def test_rest(self):
        assert rest("(1)") == "(r (1))"
    def test_nth(self):
        assert nth("val") == "val"
        assert nth("val", 0) == "(f val)"
        assert nth("val", 1) == "(f (r val))"
        assert nth("val", 2) == "(f (r (r val)))"
        assert nth("val", 2, 0) == "(f (f (r (r val))))"
        assert nth("val", 2, 1) == "(f (r (f (r (r val)))))"
        assert nth("val", 2, 2) == "(f (r (r (f (r (r val))))))"
        with pytest.raises(ValueError):
            nth("val", -1)
    def test_args(self):
        # The returned values appear to be CLVM environment node paths (1 is
        # the whole environment; 2/5/11/... descend into it) -- see the
        # flaxlisp module for the encoding.
        assert args() == "1"
        assert args(0) == "2"
        assert args(1) == "5"
        assert args(2) == "11"
        assert args(2, 0) == "22"
        assert args(2, 1) == "45"
        assert args(2, 2) == "91"
        with pytest.raises(ValueError):
            args(-1)
    def test_eval(self):
        assert eval("code") == "(a code 1)"
        assert eval("code", "env") == "(a code env)"
    def test_apply(self):
        assert apply("f", ()) == ("(f)")
        assert apply("f", ("1")) == ("(f 1)")
        assert apply("f", ("1", "2")) == ("(f 1 2)")
    def test_quote(self):
        assert quote(1) == "(q . 1)"
    def test_make_if(self):
        assert make_if("p", "t", "f") == "(a (i p (q . t) (q . f)) 1)"
    def test_make_list(self):
        # Note that nil is self-quoting now
        assert make_list() == "()"
        assert make_list(1) == "(c 1 ())"
        assert make_list(1, 2) == "(c 1 (c 2 ()))"
    def test_fail(self):
        assert fail("error") == "(x error)"
    def test_is_zero(self):
        assert is_zero("(q . 1)") == "(= (q . 1) (q . 0))"
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_transaction_store.py | tests/wallet/test_transaction_store.py | from __future__ import annotations
import dataclasses
from secrets import token_bytes
from typing import Any, List
import pytest
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.mempool_inclusion_status import MempoolInclusionStatus
from flax.util.errors import Err
from flax.util.ints import uint8, uint32, uint64
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.util.transaction_type import TransactionType
from flax.wallet.wallet_transaction_store import WalletTransactionStore, filter_ok_mempool_status
from tests.util.db_connection import DBConnection
# Fixture coins: coin_1 is spent into coin_2 (the payment) and coin_3 (the
# change, coin_1's amount minus coin_2's).
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_2 = Coin(token_bytes(32), token_bytes(32), uint64(1234))
coin_3 = Coin(token_bytes(32), token_bytes(32), uint64(12312 - 1234))

# Baseline unconfirmed outgoing transaction; tests derive variants from it
# with dataclasses.replace().
tr1 = TransactionRecord(
    uint32(0),  # confirmed height
    uint64(1000),  # created_at_time
    bytes32(token_bytes(32)),  # to_puzzle_hash
    uint64(1234),  # amount
    uint64(12),  # fee_amount
    False,  # confirmed
    uint32(0),  # sent
    None,  # Optional[SpendBundle] spend_bundle
    [coin_2, coin_3],  # additions
    [coin_1],  # removals
    uint32(1),  # wallet_id
    [],  # List[Tuple[str, uint8, Optional[str]]] sent_to
    bytes32(token_bytes(32)),  # trade_id
    uint32(TransactionType.OUTGOING_TX),  # type
    bytes32(token_bytes(32)),  # name
    [],  # List[Tuple[bytes32, List[bytes]]] memos
)
@pytest.mark.asyncio
async def test_add() -> None:
    """Adding a record makes it retrievable by name."""
    async with DBConnection(1) as db_wrapper:
        tx_store = await WalletTransactionStore.create(db_wrapper)
        # absent before insertion
        assert await tx_store.get_transaction_record(tr1.name) is None
        await tx_store.add_transaction_record(tr1)
        # round-trips intact
        assert await tx_store.get_transaction_record(tr1.name) == tr1
@pytest.mark.asyncio
async def test_delete() -> None:
    """Deleting a stored record removes it from lookup."""
    async with DBConnection(1) as db_wrapper:
        tx_store = await WalletTransactionStore.create(db_wrapper)
        await tx_store.add_transaction_record(tr1)
        fetched = await tx_store.get_transaction_record(tr1.name)
        assert fetched == tr1
        await tx_store.delete_transaction_record(tr1.name)
        assert await tx_store.get_transaction_record(tr1.name) is None
@pytest.mark.asyncio
async def test_set_confirmed() -> None:
    """set_confirmed() flips the confirmed flag and records the height."""
    async with DBConnection(1) as db_wrapper:
        tx_store = await WalletTransactionStore.create(db_wrapper)
        await tx_store.add_transaction_record(tr1)
        height = uint32(100)
        await tx_store.set_confirmed(tr1.name, height)
        expected = dataclasses.replace(tr1, confirmed=True, confirmed_at_height=height)
        assert await tx_store.get_transaction_record(tr1.name) == expected
@pytest.mark.asyncio
async def test_increment_sent_noop() -> None:
    """increment_sent() on an unknown transaction id reports failure."""
    async with DBConnection(1) as db_wrapper:
        tx_store = await WalletTransactionStore.create(db_wrapper)
        unknown_id = bytes32(token_bytes(32))
        result = await tx_store.increment_sent(unknown_id, "peer1", MempoolInclusionStatus.PENDING, None)
        assert result is False
@pytest.mark.asyncio
async def test_increment_sent() -> None:
    """increment_sent() appends (peer, status, error) entries to sent_to.

    From the asserts below, the `sent` counter appears to track distinct
    peers rather than total entries — a repeat report from the same peer
    adds to sent_to but leaves `sent` unchanged.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        await store.add_transaction_record(tr1)
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 0
        assert tr.sent_to == []
        # first report from peer1 (PENDING serializes as uint8(2)) bumps `sent`
        assert await store.increment_sent(tr1.name, "peer1", MempoolInclusionStatus.PENDING, None) is True
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 1
        assert tr.sent_to == [("peer1", uint8(2), None)]
        # same peer reporting SUCCESS (uint8(1)): sent_to grows, `sent` does not
        assert await store.increment_sent(tr1.name, "peer1", MempoolInclusionStatus.SUCCESS, None) is True
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 1
        assert tr.sent_to == [("peer1", uint8(2), None), ("peer1", uint8(1), None)]
        # a new peer bumps `sent` again
        assert await store.increment_sent(tr1.name, "peer2", MempoolInclusionStatus.SUCCESS, None) is True
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 2
        assert tr.sent_to == [("peer1", uint8(2), None), ("peer1", uint8(1), None), ("peer2", uint8(1), None)]
@pytest.mark.asyncio
async def test_increment_sent_error() -> None:
    """A FAILED mempool status stores the error's name with the peer entry."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        await store.add_transaction_record(tr1)
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 0
        assert tr.sent_to == []
        # FAILED serializes as uint8(3); the Err is recorded by name string
        await store.increment_sent(tr1.name, "peer1", MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED)
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 1
        assert tr.sent_to == [("peer1", uint8(3), "MEMPOOL_NOT_INITIALIZED")]
def test_filter_ok_mempool_status() -> None:
    """filter_ok_mempool_status() keeps only FAILED (status 3) entries."""
    failed = ("peer1", uint8(3), None)
    # SUCCESS (1) and PENDING (2) entries are dropped
    assert filter_ok_mempool_status([("peer1", uint8(1), None)]) == []
    assert filter_ok_mempool_status([("peer1", uint8(2), None)]) == []
    assert filter_ok_mempool_status([failed]) == [failed]
    mixed = [("peer1", uint8(2), None), ("peer1", uint8(1), None), failed]
    assert filter_ok_mempool_status(mixed) == [failed]
    # the error-message field does not affect filtering
    failed_with_msg = ("peer1", uint8(3), "message does not matter")
    assert filter_ok_mempool_status([failed_with_msg]) == [failed_with_msg]
    assert filter_ok_mempool_status([("peer1", uint8(2), "message does not matter")]) == []
@pytest.mark.asyncio
async def test_tx_reorged_update() -> None:
    """tx_reorged() on an existing record resets its sent state."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr = dataclasses.replace(tr1, sent=2, sent_to=[("peer1", uint8(1), None), ("peer2", uint8(1), None)])
        await store.add_transaction_record(tr)
        tr = await store.get_transaction_record(tr.name)
        assert tr.sent == 2
        assert tr.sent_to == [("peer1", uint8(1), None), ("peer2", uint8(1), None)]
        await store.tx_reorged(tr)
        # sent counters are cleared after a reorg (tr.name == tr1.name here,
        # since replace() did not change the name)
        tr = await store.get_transaction_record(tr1.name)
        assert tr.sent == 0
        assert tr.sent_to == []
@pytest.mark.asyncio
async def test_tx_reorged_add() -> None:
    """tx_reorged() on a record not yet in the store adds it with sent state reset.

    Fix: the pre-condition check discarded its result (`await ... is None` as a
    bare statement); it is now a real assertion.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr = dataclasses.replace(tr1, sent=2, sent_to=[("peer1", uint8(1), None), ("peer2", uint8(1), None)])
        # record must not exist yet; tx_reorged() is expected to insert it
        assert await store.get_transaction_record(tr.name) is None
        await store.tx_reorged(tr)
        tr = await store.get_transaction_record(tr.name)
        assert tr.sent == 0
        assert tr.sent_to == []
@pytest.mark.asyncio
async def test_get_tx_record() -> None:
    """Records are stored and fetched independently by name."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr2 = dataclasses.replace(tr1, name=token_bytes(32))
        tr3 = dataclasses.replace(tr1, name=token_bytes(32))
        records = [tr1, tr2, tr3]
        for record in records:
            # absent until added, present (and intact) afterwards
            assert await store.get_transaction_record(record.name) is None
            await store.add_transaction_record(record)
            assert await store.get_transaction_record(record.name) == record
        # earlier records remain retrievable after later inserts
        for record in records:
            assert await store.get_transaction_record(record.name) == record
@pytest.mark.asyncio
async def test_get_farming_rewards() -> None:
    """get_farming_rewards() returns only confirmed COINBASE_REWARD and
    FEE_REWARD records.

    Fix: the inner loop variable `type` shadowed the builtin; renamed to
    `tx_type`.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        test_trs: List[TransactionRecord] = []
        # tr1 is type OUTGOING_TX
        for conf in [True, False]:
            for tx_type in [
                TransactionType.INCOMING_TX,
                TransactionType.OUTGOING_TX,
                TransactionType.COINBASE_REWARD,
                TransactionType.FEE_REWARD,
                TransactionType.INCOMING_TRADE,
                TransactionType.OUTGOING_TRADE,
            ]:
                test_trs.append(
                    dataclasses.replace(
                        tr1,
                        name=token_bytes(32),
                        confirmed=conf,
                        confirmed_at_height=uint32(100 if conf else 0),
                        type=tx_type,
                    )
                )
        for tr in test_trs:
            await store.add_transaction_record(tr)
            assert await store.get_transaction_record(tr.name) == tr
        rewards = await store.get_farming_rewards()
        # only the confirmed COINBASE_REWARD (index 2) and FEE_REWARD (index 3) qualify
        assert len(rewards) == 2
        assert test_trs[2] in rewards
        assert test_trs[3] in rewards
@pytest.mark.asyncio
async def test_get_all_unconfirmed() -> None:
    """get_all_unconfirmed() excludes confirmed transactions."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        confirmed_tr = dataclasses.replace(tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(100))
        await store.add_transaction_record(tr1)
        await store.add_transaction_record(confirmed_tr)
        # only the unconfirmed baseline record comes back
        assert await store.get_all_unconfirmed() == [tr1]
@pytest.mark.asyncio
async def test_get_unconfirmed_for_wallet() -> None:
    """get_unconfirmed_for_wallet() filters by confirmation state and wallet id."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(100))
        tr3 = dataclasses.replace(tr1, name=token_bytes(32), wallet_id=2)
        tr4 = dataclasses.replace(tr2, name=token_bytes(32), wallet_id=2)
        for record in (tr1, tr2, tr3, tr4):
            await store.add_transaction_record(record)
        # one unconfirmed record per wallet
        assert await store.get_unconfirmed_for_wallet(1) == [tr1]
        assert await store.get_unconfirmed_for_wallet(2) == [tr3]
@pytest.mark.asyncio
async def test_transaction_count_for_wallet() -> None:
    """get_transaction_count_for_wallet() counts stored records per wallet id."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), wallet_id=2)
        # 5 transactions in wallet_id 1
        await store.add_transaction_record(tr1)
        for _ in range(4):
            await store.add_transaction_record(dataclasses.replace(tr1, name=token_bytes(32)))
        # 2 transactions in wallet_id 2
        await store.add_transaction_record(tr2)
        await store.add_transaction_record(dataclasses.replace(tr2, name=token_bytes(32)))
        assert await store.get_transaction_count_for_wallet(1) == 5
        assert await store.get_transaction_count_for_wallet(2) == 2
@pytest.mark.asyncio
async def test_all_transactions_for_wallet() -> None:
    """get_all_transactions_for_wallet() filters by wallet id and optional type.

    Fix: the inner loop variable `type` shadowed the builtin; renamed to
    `tx_type`.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        test_trs: List[TransactionRecord] = []
        for wallet_id in [1, 2]:
            for tx_type in [
                TransactionType.INCOMING_TX,
                TransactionType.OUTGOING_TX,
                TransactionType.COINBASE_REWARD,
                TransactionType.FEE_REWARD,
                TransactionType.INCOMING_TRADE,
                TransactionType.OUTGOING_TRADE,
            ]:
                test_trs.append(dataclasses.replace(tr1, name=token_bytes(32), wallet_id=wallet_id, type=tx_type))
        for tr in test_trs:
            await store.add_transaction_record(tr)
        # wallet filter alone: indices 0-5 belong to wallet 1, 6-11 to wallet 2
        assert await store.get_all_transactions_for_wallet(1) == test_trs[:6]
        assert await store.get_all_transactions_for_wallet(2) == test_trs[6:]
        # wallet + type filters
        assert await store.get_all_transactions_for_wallet(1, TransactionType.INCOMING_TX) == [test_trs[0]]
        assert await store.get_all_transactions_for_wallet(1, TransactionType.OUTGOING_TX) == [test_trs[1]]
        assert await store.get_all_transactions_for_wallet(1, TransactionType.INCOMING_TRADE) == [test_trs[4]]
        assert await store.get_all_transactions_for_wallet(1, TransactionType.OUTGOING_TRADE) == [test_trs[5]]
        assert await store.get_all_transactions_for_wallet(2, TransactionType.INCOMING_TX) == [test_trs[6]]
        assert await store.get_all_transactions_for_wallet(2, TransactionType.OUTGOING_TX) == [test_trs[7]]
        assert await store.get_all_transactions_for_wallet(2, TransactionType.INCOMING_TRADE) == [test_trs[10]]
        assert await store.get_all_transactions_for_wallet(2, TransactionType.OUTGOING_TRADE) == [test_trs[11]]
def cmp(lhs: List[Any], rhs: List[Any]) -> bool:
    """Order-insensitive list comparison: equal lengths and every element of
    `lhs` appears somewhere in `rhs`.

    NOTE: not a strict multiset comparison — duplicates in `lhs` can all
    match one `rhs` element. Sufficient for the unique records used here.
    """
    return len(lhs) == len(rhs) and all(item in rhs for item in lhs)
@pytest.mark.asyncio
async def test_get_all_transactions() -> None:
    """get_all_transactions() returns every stored record across all wallets."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        # empty store yields an empty list
        assert await store.get_all_transactions() == []
        test_trs = [dataclasses.replace(tr1, name=token_bytes(32), wallet_id=wallet_id) for wallet_id in [1, 2, 3, 4]]
        for tr in test_trs:
            await store.add_transaction_record(tr)
        assert cmp(await store.get_all_transactions(), test_trs)
@pytest.mark.asyncio
async def test_get_transaction_above() -> None:
    """get_transaction_above(h) returns records confirmed strictly above height h."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        assert await store.get_transaction_above(uint32(0)) == []
        test_trs = [
            dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(height)) for height in range(10)
        ]
        for tr in test_trs:
            await store.add_transaction_record(tr)
        for height in range(10):
            above = await store.get_transaction_above(uint32(height))
            # strictly-greater comparison: height h excludes the record at h itself
            assert cmp(above, test_trs[height + 1 :])
@pytest.mark.asyncio
async def test_get_tx_by_trade_id() -> None:
    """get_transactions_by_trade_id() returns every record sharing a trade id."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        # tr2/tr3 get fresh trade ids; tr4 keeps tr1's trade id
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), trade_id=token_bytes(32))
        tr3 = dataclasses.replace(tr1, name=token_bytes(32), trade_id=token_bytes(32))
        tr4 = dataclasses.replace(tr1, name=token_bytes(32))
        assert await store.get_transactions_by_trade_id(tr1.trade_id) == []
        await store.add_transaction_record(tr1)
        assert await store.get_transactions_by_trade_id(tr1.trade_id) == [tr1]
        assert await store.get_transactions_by_trade_id(tr2.trade_id) == []
        await store.add_transaction_record(tr2)
        assert await store.get_transactions_by_trade_id(tr2.trade_id) == [tr2]
        assert await store.get_transactions_by_trade_id(tr3.trade_id) == []
        await store.add_transaction_record(tr3)
        assert await store.get_transactions_by_trade_id(tr3.trade_id) == [tr3]
        # tr1 and tr4 have the same trade_id
        assert await store.get_transactions_by_trade_id(tr4.trade_id) == [tr1]
        await store.add_transaction_record(tr4)
        assert cmp(await store.get_transactions_by_trade_id(tr4.trade_id), [tr1, tr4])
        assert cmp(await store.get_transactions_by_trade_id(tr1.trade_id), [tr1, tr4])
        assert await store.get_transactions_by_trade_id(tr2.trade_id) == [tr2]
        assert await store.get_transactions_by_trade_id(tr3.trade_id) == [tr3]
        assert cmp(await store.get_transactions_by_trade_id(tr4.trade_id), [tr1, tr4])
@pytest.mark.asyncio
async def test_rollback_to_block() -> None:
    """rollback_to_block(h) drops records confirmed above height h."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        test_trs = [
            dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(height)) for height in range(10)
        ]
        for tr in test_trs:
            await store.add_transaction_record(tr)
        # heights 0..6 survive a rollback to height 6
        await store.rollback_to_block(uint32(6))
        assert cmp(await store.get_all_transactions(), test_trs[:7])
        # rolling back further trims again
        await store.rollback_to_block(uint32(5))
        assert cmp(await store.get_all_transactions(), test_trs[:6])
@pytest.mark.asyncio
async def test_delete_unconfirmed() -> None:
    """delete_unconfirmed_transactions() removes only one wallet's unconfirmed records."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), confirmed=True)
        tr3 = dataclasses.replace(tr1, name=token_bytes(32), confirmed=True, wallet_id=2)
        tr4 = dataclasses.replace(tr1, name=token_bytes(32), wallet_id=2)
        for record in (tr1, tr2, tr3, tr4):
            await store.add_transaction_record(record)
        assert cmp(await store.get_all_transactions(), [tr1, tr2, tr3, tr4])
        # wallet 1: unconfirmed tr1 is removed, confirmed tr2 survives
        await store.delete_unconfirmed_transactions(1)
        assert cmp(await store.get_all_transactions(), [tr2, tr3, tr4])
        # wallet 2: unconfirmed tr4 is removed, confirmed tr3 survives
        await store.delete_unconfirmed_transactions(2)
        assert cmp(await store.get_all_transactions(), [tr2, tr3])
@pytest.mark.asyncio
async def test_get_transactions_between_confirmed() -> None:
    """get_transactions_between() pages records ordered by confirmed height,
    honoring start offset, limit, and the reverse flag."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(1))
        tr3 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(2))
        tr4 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(3))
        tr5 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(4))
        await store.add_transaction_record(tr1)
        await store.add_transaction_record(tr2)
        await store.add_transaction_record(tr3)
        await store.add_transaction_record(tr4)
        await store.add_transaction_record(tr5)
        # test different limits
        assert await store.get_transactions_between(1, 0, 1) == [tr1]
        assert await store.get_transactions_between(1, 0, 2) == [tr1, tr2]
        assert await store.get_transactions_between(1, 0, 3) == [tr1, tr2, tr3]
        assert await store.get_transactions_between(1, 0, 100) == [tr1, tr2, tr3, tr4, tr5]
        # test different start offsets
        assert await store.get_transactions_between(1, 1, 100) == [tr2, tr3, tr4, tr5]
        assert await store.get_transactions_between(1, 2, 100) == [tr3, tr4, tr5]
        assert await store.get_transactions_between(1, 3, 100) == [tr4, tr5]
        # wallet 2 is empty
        assert await store.get_transactions_between(2, 0, 100) == []
        # reverse
        # test different limits
        assert await store.get_transactions_between(1, 0, 1, reverse=True) == [tr5]
        assert await store.get_transactions_between(1, 0, 2, reverse=True) == [tr5, tr4]
        assert await store.get_transactions_between(1, 0, 3, reverse=True) == [tr5, tr4, tr3]
        assert await store.get_transactions_between(1, 0, 100, reverse=True) == [tr5, tr4, tr3, tr2, tr1]
        # test different start offsets
        assert await store.get_transactions_between(1, 1, 100, reverse=True) == [tr4, tr3, tr2, tr1]
        assert await store.get_transactions_between(1, 2, 100, reverse=True) == [tr3, tr2, tr1]
        assert await store.get_transactions_between(1, 3, 100, reverse=True) == [tr2, tr1]
@pytest.mark.asyncio
async def test_get_transactions_between_relevance() -> None:
    """get_transactions_between(sort_key="RELEVANCE") pages records in the
    order the asserts below pin down: unconfirmed before confirmed, then
    higher confirmed height, then later created_at_time (t1 most relevant,
    t8 least)."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        # t1..t8 enumerate all combinations of (confirmed, height, time)
        t1 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=False, confirmed_at_height=uint32(2), created_at_time=1000
        )
        t2 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=False, confirmed_at_height=uint32(2), created_at_time=999
        )
        t3 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=False, confirmed_at_height=uint32(1), created_at_time=1000
        )
        t4 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=False, confirmed_at_height=uint32(1), created_at_time=999
        )
        t5 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(2), created_at_time=1000
        )
        t6 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(2), created_at_time=999
        )
        t7 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(1), created_at_time=1000
        )
        t8 = dataclasses.replace(
            tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(1), created_at_time=999
        )
        await store.add_transaction_record(t1)
        await store.add_transaction_record(t2)
        await store.add_transaction_record(t3)
        await store.add_transaction_record(t4)
        await store.add_transaction_record(t5)
        await store.add_transaction_record(t6)
        await store.add_transaction_record(t7)
        await store.add_transaction_record(t8)
        # test different limits
        assert await store.get_transactions_between(1, 0, 1, sort_key="RELEVANCE") == [t1]
        assert await store.get_transactions_between(1, 0, 2, sort_key="RELEVANCE") == [t1, t2]
        assert await store.get_transactions_between(1, 0, 3, sort_key="RELEVANCE") == [t1, t2, t3]
        assert await store.get_transactions_between(1, 0, 100, sort_key="RELEVANCE") == [t1, t2, t3, t4, t5, t6, t7, t8]
        # test different start offsets
        assert await store.get_transactions_between(1, 1, 100, sort_key="RELEVANCE") == [t2, t3, t4, t5, t6, t7, t8]
        assert await store.get_transactions_between(1, 2, 100, sort_key="RELEVANCE") == [t3, t4, t5, t6, t7, t8]
        assert await store.get_transactions_between(1, 3, 100, sort_key="RELEVANCE") == [t4, t5, t6, t7, t8]
        assert await store.get_transactions_between(1, 4, 100, sort_key="RELEVANCE") == [t5, t6, t7, t8]
        # wallet 2 is empty
        assert await store.get_transactions_between(2, 0, 100, sort_key="RELEVANCE") == []
        # reverse
        # test different limits
        assert await store.get_transactions_between(1, 0, 1, sort_key="RELEVANCE", reverse=True) == [t8]
        assert await store.get_transactions_between(1, 0, 2, sort_key="RELEVANCE", reverse=True) == [t8, t7]
        assert await store.get_transactions_between(1, 0, 3, sort_key="RELEVANCE", reverse=True) == [t8, t7, t6]
        assert await store.get_transactions_between(1, 0, 100, sort_key="RELEVANCE", reverse=True) == [
            t8,
            t7,
            t6,
            t5,
            t4,
            t3,
            t2,
            t1,
        ]
        # test different start offsets
        assert await store.get_transactions_between(1, 1, 100, sort_key="RELEVANCE", reverse=True) == [
            t7,
            t6,
            t5,
            t4,
            t3,
            t2,
            t1,
        ]
        assert await store.get_transactions_between(1, 2, 100, sort_key="RELEVANCE", reverse=True) == [
            t6,
            t5,
            t4,
            t3,
            t2,
            t1,
        ]
        assert await store.get_transactions_between(1, 3, 100, sort_key="RELEVANCE", reverse=True) == [
            t5,
            t4,
            t3,
            t2,
            t1,
        ]
@pytest.mark.asyncio
async def test_get_transactions_between_to_puzzle_hash() -> None:
    """get_transactions_between(to_puzzle_hash=...) restricts paging to records
    paying a specific puzzle hash, still honoring offset/limit/reverse."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        ph1 = token_bytes(32)
        ph2 = token_bytes(32)
        # tr2/tr3 pay ph1, tr4/tr5 pay ph2; tr1 pays neither
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(1), to_puzzle_hash=ph1)
        tr3 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(2), to_puzzle_hash=ph1)
        tr4 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(3), to_puzzle_hash=ph2)
        tr5 = dataclasses.replace(tr1, name=token_bytes(32), confirmed_at_height=uint32(4), to_puzzle_hash=ph2)
        await store.add_transaction_record(tr1)
        await store.add_transaction_record(tr2)
        await store.add_transaction_record(tr3)
        await store.add_transaction_record(tr4)
        await store.add_transaction_record(tr5)
        # test different limits
        assert await store.get_transactions_between(1, 0, 100, to_puzzle_hash=ph1) == [tr2, tr3]
        assert await store.get_transactions_between(1, 0, 100, to_puzzle_hash=ph2) == [tr4, tr5]
        # test different start offsets
        assert await store.get_transactions_between(1, 1, 100, to_puzzle_hash=ph1) == [tr3]
        assert await store.get_transactions_between(1, 1, 100, to_puzzle_hash=ph2) == [tr5]
        # reverse
        # test different limits
        assert await store.get_transactions_between(1, 0, 100, to_puzzle_hash=ph1, reverse=True) == [tr3, tr2]
        assert await store.get_transactions_between(1, 0, 100, to_puzzle_hash=ph2, reverse=True) == [tr5, tr4]
        # test different start offsets
        assert await store.get_transactions_between(1, 1, 100, to_puzzle_hash=ph1, reverse=True) == [tr2]
        assert await store.get_transactions_between(1, 1, 100, to_puzzle_hash=ph2, reverse=True) == [tr4]
@pytest.mark.asyncio
async def test_get_not_sent() -> None:
    """get_not_sent() returns unconfirmed, not-yet-sent transactions.

    The asserts below show each call counts as a resend attempt: after five
    retrievals of the same records the store stops returning them.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletTransactionStore.create(db_wrapper)
        # tr2 is confirmed and must never be returned; tr1/tr3/tr4 are unconfirmed
        tr2 = dataclasses.replace(tr1, name=token_bytes(32), confirmed=True, confirmed_at_height=uint32(1))
        tr3 = dataclasses.replace(tr1, name=token_bytes(32))
        tr4 = dataclasses.replace(tr1, name=token_bytes(32))
        await store.add_transaction_record(tr1)
        await store.add_transaction_record(tr2)
        await store.add_transaction_record(tr3)
        await store.add_transaction_record(tr4)
        not_sent = await store.get_not_sent()
        assert cmp(not_sent, [tr1, tr3, tr4])
        not_sent = await store.get_not_sent()
        assert cmp(not_sent, [tr1, tr3, tr4])
        not_sent = await store.get_not_sent()
        assert cmp(not_sent, [tr1, tr3, tr4])
        not_sent = await store.get_not_sent()
        assert cmp(not_sent, [tr1, tr3, tr4])
        not_sent = await store.get_not_sent()
        assert cmp(not_sent, [tr1, tr3, tr4])
        # the 6th time we call this function, we don't get any unsent txs
        not_sent = await store.get_not_sent()
        assert cmp(not_sent, [])
        # TODO: also cover include_accepted_txs=True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_node.py | tests/wallet/test_wallet_node.py | from __future__ import annotations
import sys
from pathlib import Path
from typing import Any, Dict, Optional
import pytest
from blspy import PrivateKey
from flax.util.config import load_config
from flax.util.keychain import Keychain, generate_mnemonic
from flax.wallet.wallet_node import WalletNode
from tests.setup_nodes import test_constants
@pytest.mark.asyncio
async def test_get_private_key(root_path_populated_with_config: Path, get_temp_keyring: Keychain) -> None:
    """get_private_key() resolves a key in the keychain by its fingerprint."""
    root_path: Path = root_path_populated_with_config
    keychain: Keychain = get_temp_keyring
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants, keychain)
    sk: PrivateKey = keychain.add_private_key(generate_mnemonic())
    fingerprint: int = sk.get_g1().get_fingerprint()
    resolved = await node.get_private_key(fingerprint)
    assert resolved is not None
    assert resolved.get_g1().get_fingerprint() == fingerprint
@pytest.mark.asyncio
async def test_get_private_key_default_key(root_path_populated_with_config: Path, get_temp_keyring: Keychain) -> None:
    """With no fingerprint given, get_private_key() falls back to the first key."""
    root_path: Path = root_path_populated_with_config
    keychain: Keychain = get_temp_keyring
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants, keychain)
    sk: PrivateKey = keychain.add_private_key(generate_mnemonic())
    fingerprint: int = sk.get_g1().get_fingerprint()
    # Add a couple more keys
    keychain.add_private_key(generate_mnemonic())
    keychain.add_private_key(generate_mnemonic())
    # When no fingerprint is provided, we should get the default (first) key
    key = await node.get_private_key(None)
    assert key is not None
    assert key.get_g1().get_fingerprint() == fingerprint
@pytest.mark.asyncio
@pytest.mark.parametrize("fingerprint", [None, 1234567890])
async def test_get_private_key_missing_key(
    root_path_populated_with_config: Path, get_temp_keyring: Keychain, fingerprint: Optional[int]
) -> None:
    """An empty keyring yields None for any fingerprint (including None)."""
    root_path: Path = root_path_populated_with_config
    keychain: Keychain = get_temp_keyring  # empty keyring
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants, keychain)
    # Keyring is empty, so requesting a key by fingerprint or None should return None
    key = await node.get_private_key(fingerprint)
    assert key is None
@pytest.mark.asyncio
async def test_get_private_key_missing_key_use_default(
    root_path_populated_with_config: Path, get_temp_keyring: Keychain
) -> None:
    """An unknown fingerprint falls back to the first key in the keychain."""
    root_path: Path = root_path_populated_with_config
    keychain: Keychain = get_temp_keyring
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants, keychain)
    sk: PrivateKey = keychain.add_private_key(generate_mnemonic())
    fingerprint: int = sk.get_g1().get_fingerprint()
    # Stupid sanity check that the fingerprint we're going to use isn't actually in the keychain
    assert fingerprint != 1234567890
    # When fingerprint is provided and the key is missing, we should get the default (first) key
    key = await node.get_private_key(1234567890)
    assert key is not None
    assert key.get_g1().get_fingerprint() == fingerprint
def test_log_in(root_path_populated_with_config: Path, get_temp_keyring: Keychain) -> None:
    """log_in() records the fingerprint both in memory and on disk."""
    root_path: Path = root_path_populated_with_config
    keychain: Keychain = get_temp_keyring
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    sk: PrivateKey = keychain.add_private_key(generate_mnemonic())
    expected_fingerprint: int = sk.get_g1().get_fingerprint()
    node.log_in(sk)
    assert node.logged_in is True
    assert node.logged_in_fingerprint == expected_fingerprint
    assert node.get_last_used_fingerprint() == expected_fingerprint
def test_log_in_failure_to_write_last_used_fingerprint(
    root_path_populated_with_config: Path, get_temp_keyring: Keychain, monkeypatch: Any
) -> None:
    """log_in() succeeds even if persisting the last-used fingerprint fails."""
    called_update_last_used_fingerprint: bool = False

    # Replacement for WalletNode.update_last_used_fingerprint that always fails,
    # simulating an unwritable disk.
    def patched_update_last_used_fingerprint(self: Any) -> None:
        nonlocal called_update_last_used_fingerprint
        called_update_last_used_fingerprint = True
        raise Exception("Generic write failure")

    with monkeypatch.context() as m:
        m.setattr(WalletNode, "update_last_used_fingerprint", patched_update_last_used_fingerprint)
        root_path: Path = root_path_populated_with_config
        keychain: Keychain = get_temp_keyring
        config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
        node: WalletNode = WalletNode(config, root_path, test_constants)
        sk: PrivateKey = keychain.add_private_key(generate_mnemonic())
        fingerprint: int = sk.get_g1().get_fingerprint()
        # Expect log_in to succeed, even though we can't write the last used fingerprint
        node.log_in(sk)
        assert node.logged_in is True
        assert node.logged_in_fingerprint == fingerprint
        # nothing was persisted, but the patched writer was invoked
        assert node.get_last_used_fingerprint() is None
        assert called_update_last_used_fingerprint is True
def test_log_out(root_path_populated_with_config: Path, get_temp_keyring: Keychain) -> None:
    """log_out() clears the in-memory login state but keeps the on-disk fingerprint."""
    root_path: Path = root_path_populated_with_config
    keychain: Keychain = get_temp_keyring
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    sk: PrivateKey = keychain.add_private_key(generate_mnemonic())
    fingerprint: int = sk.get_g1().get_fingerprint()
    node.log_in(sk)
    assert node.logged_in is True
    assert node.logged_in_fingerprint == fingerprint
    assert node.get_last_used_fingerprint() == fingerprint
    node.log_out()  # type: ignore
    assert node.logged_in is False
    assert node.logged_in_fingerprint is None
    # the persisted last-used fingerprint survives logout
    assert node.get_last_used_fingerprint() == fingerprint
def test_get_last_used_fingerprint_path(root_path_populated_with_config: Path) -> None:
    """The last-used-fingerprint file lives under wallet/db in the root path."""
    root_path: Path = root_path_populated_with_config
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    expected: Path = root_path / "wallet" / "db" / "last_used_fingerprint"
    assert node.get_last_used_fingerprint_path() == expected
def test_get_last_used_fingerprint(root_path_populated_with_config: Path) -> None:
    """get_last_used_fingerprint() reads the integer stored in the fingerprint file."""
    root_path: Path = root_path_populated_with_config
    fingerprint_file: Path = root_path / "wallet" / "db" / "last_used_fingerprint"
    fingerprint_file.parent.mkdir(parents=True, exist_ok=True)
    fingerprint_file.write_text("1234567890")
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    assert node.get_last_used_fingerprint() == 1234567890
def test_get_last_used_fingerprint_file_doesnt_exist(root_path_populated_with_config: Path) -> None:
    """A missing fingerprint file yields None rather than an error."""
    root_path: Path = root_path_populated_with_config
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    assert node.get_last_used_fingerprint() is None
def test_get_last_used_fingerprint_file_cant_read_unix(root_path_populated_with_config: Path) -> None:
    """An unreadable fingerprint file yields None instead of raising (POSIX)."""
    if sys.platform in ["win32", "cygwin"]:
        pytest.skip("Setting UNIX file permissions doesn't apply to Windows")
    root_path: Path = root_path_populated_with_config
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    path: Path = node.get_last_used_fingerprint_path()
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("1234567890")
    assert node.get_last_used_fingerprint() == 1234567890
    # Make the file unreadable
    path.chmod(0o000)
    last_used_fingerprint: Optional[int] = node.get_last_used_fingerprint()
    assert last_used_fingerprint is None
    # Verify that the file is unreadable
    with pytest.raises(PermissionError):
        path.read_text()
    # Calling get_last_used_fingerprint() should not throw an exception
    assert node.get_last_used_fingerprint() is None
    # restore permissions so teardown can clean up the temp directory
    path.chmod(0o600)
def test_get_last_used_fingerprint_file_cant_read_win32(
    root_path_populated_with_config: Path, monkeypatch: Any
) -> None:
    """An unreadable fingerprint file yields None instead of raising (Windows,
    simulated by patching Path.read_text to raise PermissionError)."""
    if sys.platform not in ["win32", "cygwin"]:
        pytest.skip("Windows-specific test")
    called_read_text: bool = False

    # Stand-in for WindowsPath.read_text that always denies access.
    def patched_pathlib_path_read_text(self: Any) -> str:
        nonlocal called_read_text
        called_read_text = True
        raise PermissionError("Permission denied")

    root_path: Path = root_path_populated_with_config
    config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
    node: WalletNode = WalletNode(config, root_path, test_constants)
    path: Path = node.get_last_used_fingerprint_path()
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text("1234567890")
    assert node.get_last_used_fingerprint() == 1234567890
    # Make the file unreadable. Doing this with pywin32 is more trouble than it's worth. All we care about is that
    # get_last_used_fingerprint doesn't throw an exception.
    with monkeypatch.context() as m:
        from pathlib import WindowsPath

        m.setattr(WindowsPath, "read_text", patched_pathlib_path_read_text)
        # Calling get_last_used_fingerprint() should not throw an exception
        last_used_fingerprint: Optional[int] = node.get_last_used_fingerprint()
        # Verify that the file is unreadable
        assert called_read_text is True
        assert last_used_fingerprint is None
def test_get_last_used_fingerprint_file_with_whitespace(root_path_populated_with_config: Path) -> None:
root_path: Path = root_path_populated_with_config
config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
node: WalletNode = WalletNode(config, root_path, test_constants)
path: Path = node.get_last_used_fingerprint_path()
path.parent.mkdir(parents=True, exist_ok=True)
path.write_text("\n\r\n \t1234567890\r\n\n")
assert node.get_last_used_fingerprint() == 1234567890
def test_update_last_used_fingerprint_missing_fingerprint(root_path_populated_with_config: Path) -> None:
root_path: Path = root_path_populated_with_config
config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
node: WalletNode = WalletNode(config, root_path, test_constants)
node.logged_in_fingerprint = None
with pytest.raises(AssertionError):
node.update_last_used_fingerprint()
def test_update_last_used_fingerprint_create_intermediate_dirs(root_path_populated_with_config: Path) -> None:
root_path: Path = root_path_populated_with_config
config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
node: WalletNode = WalletNode(config, root_path, test_constants)
node.logged_in_fingerprint = 9876543210
path = node.get_last_used_fingerprint_path()
assert path.parent.exists() is False
node.update_last_used_fingerprint()
assert path.parent.exists() is True
def test_update_last_used_fingerprint(root_path_populated_with_config: Path) -> None:
root_path: Path = root_path_populated_with_config
config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet")
node: WalletNode = WalletNode(config, root_path, test_constants)
node.logged_in_fingerprint = 9876543210
path = node.get_last_used_fingerprint_path()
node.update_last_used_fingerprint()
assert path.exists() is True
assert path.read_text() == "9876543210"
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_key_val_store.py | tests/wallet/test_wallet_key_val_store.py | import pytest
from flax.types.full_block import FullBlock
from flax.types.header_block import HeaderBlock
from flax.wallet.key_val_store import KeyValStore
from tests.util.db_connection import DBConnection
class TestWalletKeyValStore:
@pytest.mark.asyncio
async def test_store(self, bt):
async with DBConnection(1) as db_wrapper:
store = await KeyValStore.create(db_wrapper)
blocks = bt.get_consecutive_blocks(20)
block: FullBlock = blocks[0]
block_2: FullBlock = blocks[1]
assert (await store.get_object("a", FullBlock)) is None
await store.set_object("a", block)
assert await store.get_object("a", FullBlock) == block
await store.set_object("a", block)
assert await store.get_object("a", FullBlock) == block
await store.set_object("a", block_2)
await store.set_object("a", block_2)
assert await store.get_object("a", FullBlock) == block_2
await store.remove_object("a")
assert (await store.get_object("a", FullBlock)) is None
for block in blocks:
assert (await store.get_object(block.header_hash.hex(), FullBlock)) is None
await store.set_object(block.header_hash.hex(), block)
assert (await store.get_object(block.header_hash.hex(), FullBlock)) == block
# Wrong type
await store.set_object("a", block_2)
with pytest.raises(Exception):
await store.get_object("a", HeaderBlock)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_trade_store.py | tests/wallet/test_wallet_trade_store.py | from __future__ import annotations
import time
from secrets import token_bytes
import pytest
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint32, uint64
from flax.wallet.trade_record import TradeRecord
from flax.wallet.trading.trade_status import TradeStatus
from flax.wallet.trading.trade_store import TradeStore, migrate_coin_of_interest
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_coin_record import WalletCoinRecord
from flax.wallet.wallet_coin_store import WalletCoinStore
from tests.util.db_connection import DBConnection
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12311))
coin_2 = Coin(coin_1.parent_coin_info, token_bytes(32), uint64(12312))
coin_3 = Coin(coin_1.parent_coin_info, token_bytes(32), uint64(12313))
record_1 = WalletCoinRecord(coin_1, uint32(4), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_2 = WalletCoinRecord(coin_2, uint32(5), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_3 = WalletCoinRecord(coin_3, uint32(6), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
@pytest.mark.asyncio
async def test_get_coins_of_interest_with_trade_statuses() -> None:
async with DBConnection(1) as db_wrapper:
coin_store = await WalletCoinStore.create(db_wrapper)
trade_store = await TradeStore.create(db_wrapper)
await coin_store.add_coin_record(record_1)
await coin_store.add_coin_record(record_2)
await coin_store.add_coin_record(record_3)
tr1_name: bytes32 = bytes32(token_bytes(32))
tr1 = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=None,
created_at_time=uint64(time.time()),
is_my_offer=True,
sent=uint32(0),
offer=bytes([1, 2, 3]),
taken_offer=None,
coins_of_interest=[coin_2],
trade_id=tr1_name,
status=uint32(TradeStatus.PENDING_ACCEPT.value),
sent_to=[],
)
await trade_store.add_trade_record(tr1)
tr2_name: bytes32 = bytes32(token_bytes(32))
tr2 = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=None,
created_at_time=uint64(time.time()),
is_my_offer=True,
sent=uint32(0),
offer=bytes([1, 2, 3]),
taken_offer=None,
coins_of_interest=[coin_1, coin_3],
trade_id=tr2_name,
status=uint32(TradeStatus.PENDING_CONFIRM.value),
sent_to=[],
)
await trade_store.add_trade_record(tr2)
assert await trade_store.get_coin_ids_of_interest_with_trade_statuses([TradeStatus.PENDING_CONFIRM]) == {
coin_1.name(),
coin_3.name(),
}
assert await trade_store.get_coin_ids_of_interest_with_trade_statuses([TradeStatus.PENDING_ACCEPT]) == {
coin_2.name()
}
# test replace trade record
tr2_1 = TradeRecord(
confirmed_at_index=uint32(0),
accepted_at_time=None,
created_at_time=uint64(time.time()),
is_my_offer=True,
sent=uint32(0),
offer=bytes([1, 2, 3]),
taken_offer=None,
coins_of_interest=[coin_2],
trade_id=tr2_name,
status=uint32(TradeStatus.PENDING_CONFIRM.value),
sent_to=[],
)
await trade_store.add_trade_record(tr2_1)
assert await trade_store.get_coin_ids_of_interest_with_trade_statuses([TradeStatus.PENDING_CONFIRM]) == {
coin_2.name()
}
# test migration
async with trade_store.db_wrapper.writer_maybe_transaction() as conn:
await conn.execute("DELETE FROM coin_of_interest_to_trade_record")
assert await trade_store.get_coin_ids_of_interest_with_trade_statuses([TradeStatus.PENDING_ACCEPT]) == set()
async with trade_store.db_wrapper.writer_maybe_transaction() as conn:
await migrate_coin_of_interest(trade_store.log, conn)
assert await trade_store.get_coin_ids_of_interest_with_trade_statuses([TradeStatus.PENDING_ACCEPT]) == {
coin_2.name()
}
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_retry.py | tests/wallet/test_wallet_retry.py | from __future__ import annotations
import asyncio
from typing import Any, List, Optional, Tuple
import pytest
from flax.full_node.full_node_api import FullNodeAPI
from flax.simulator.block_tools import BlockTools
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_custom_interval
from flax.types.peer_info import PeerInfo
from flax.types.spend_bundle import SpendBundle
from flax.util.ints import uint16, uint64
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.wallet_node import WalletNode
from tests.pools.test_pool_rpc import farm_blocks
from tests.util.wallet_is_synced import wallet_is_synced
def assert_sb_in_pool(node: FullNodeAPI, sb: SpendBundle) -> None:
assert sb == node.full_node.mempool_manager.get_spendbundle(sb.name())
def assert_sb_not_in_pool(node: FullNodeAPI, sb: SpendBundle) -> None:
assert node.full_node.mempool_manager.get_spendbundle(sb.name()) is None
assert not node.full_node.mempool_manager.seen(sb.name())
def evict_from_pool(node: FullNodeAPI, sb: SpendBundle) -> None:
mempool_item = node.full_node.mempool_manager.mempool.spends[sb.name()]
node.full_node.mempool_manager.mempool.remove_from_pool([mempool_item.name])
node.full_node.mempool_manager.remove_seen(sb.name())
@pytest.mark.asyncio
async def test_wallet_tx_retry(
setup_two_nodes_and_wallet_fast_retry: Tuple[List[FullNodeSimulator], List[Tuple[Any, Any]], BlockTools],
self_hostname: str,
) -> None:
wait_secs = 20
nodes, wallets, bt = setup_two_nodes_and_wallet_fast_retry
server_1 = nodes[0].full_node.server
full_node_1: FullNodeSimulator = nodes[0]
wallet_node_1: WalletNode = wallets[0][0]
wallet_node_1.config["tx_resend_timeout_secs"] = 5
wallet_server_1 = wallets[0][1]
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
reward_ph = await wallet_1.get_new_puzzlehash()
await wallet_server_1.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
await farm_blocks(full_node_1, reward_ph, 2)
await time_out_assert(wait_secs, wallet_is_synced, True, wallet_node_1, full_node_1)
transaction: TransactionRecord = await wallet_1.generate_signed_transaction(uint64(100), reward_ph)
sb1: Optional[SpendBundle] = transaction.spend_bundle
assert sb1 is not None
await wallet_1.push_transaction(transaction)
async def sb_in_mempool() -> bool:
return full_node_1.full_node.mempool_manager.get_spendbundle(transaction.name) == transaction.spend_bundle
# SpendBundle is accepted by peer
await time_out_assert(wait_secs, sb_in_mempool)
# Evict SpendBundle from peer
evict_from_pool(full_node_1, sb1)
assert_sb_not_in_pool(full_node_1, sb1)
# Wait some time so wallet will retry
await asyncio.sleep(2)
our_ph = await wallet_1.get_new_puzzlehash()
await farm_blocks(full_node_1, our_ph, 2)
# Wait for wallet to catch up
await time_out_assert(wait_secs, wallet_is_synced, True, wallet_node_1, full_node_1)
async def check_transaction_in_mempool_or_confirmed(transaction: TransactionRecord) -> bool:
txn = await wallet_node_1.wallet_state_manager.get_transaction(transaction.name)
assert txn is not None
sb = txn.spend_bundle
assert sb is not None
full_node_sb = full_node_1.full_node.mempool_manager.get_spendbundle(sb.name())
if full_node_sb is None:
return False
in_mempool: bool = full_node_sb.name() == sb.name()
return txn.confirmed or in_mempool
# Check that wallet resent the unconfirmed SpendBundle
await time_out_assert_custom_interval(wait_secs, 1, check_transaction_in_mempool_or_confirmed, True, transaction)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_interested_store.py | tests/wallet/test_wallet_interested_store.py | from secrets import token_bytes
import pytest
from flax.types.blockchain_format.coin import Coin
from flax.util.ints import uint64
from flax.wallet.wallet_interested_store import WalletInterestedStore
from tests.util.db_connection import DBConnection
class TestWalletInterestedStore:
@pytest.mark.asyncio
async def test_store(self):
async with DBConnection(1) as db_wrapper:
store = await WalletInterestedStore.create(db_wrapper)
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_2 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
assert (await store.get_interested_coin_ids()) == []
await store.add_interested_coin_id(coin_1.name())
assert (await store.get_interested_coin_ids()) == [coin_1.name()]
await store.add_interested_coin_id(coin_1.name())
assert (await store.get_interested_coin_ids()) == [coin_1.name()]
await store.add_interested_coin_id(coin_2.name())
assert set(await store.get_interested_coin_ids()) == {coin_1.name(), coin_2.name()}
puzzle_hash = token_bytes(32)
assert len(await store.get_interested_puzzle_hashes()) == 0
await store.add_interested_puzzle_hash(puzzle_hash, 2)
assert len(await store.get_interested_puzzle_hashes()) == 1
await store.add_interested_puzzle_hash(puzzle_hash, 2)
assert len(await store.get_interested_puzzle_hashes()) == 1
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) == 2
await store.add_interested_puzzle_hash(puzzle_hash, 3)
assert len(await store.get_interested_puzzle_hashes()) == 1
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) == 3
await store.remove_interested_puzzle_hash(puzzle_hash)
assert (await store.get_interested_puzzle_hash_wallet_id(puzzle_hash)) is None
assert len(await store.get_interested_puzzle_hashes()) == 0
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/config.py | tests/wallet/config.py | # flake8: noqa: E501
from __future__ import annotations
job_timeout = 40
parallel = True
checkout_blocks_and_plots = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_coin_selection.py | tests/wallet/test_coin_selection.py | from __future__ import annotations
import logging
import time
from random import randrange
from typing import List, Set
import pytest
from flax.consensus.default_constants import DEFAULT_CONSTANTS
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.hash import std_hash
from flax.util.ints import uint32, uint64, uint128
from flax.wallet.coin_selection import (
check_for_exact_match,
knapsack_coin_algorithm,
select_coins,
select_smallest_coin_over_target,
sum_largest_coins,
)
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_coin_record import WalletCoinRecord
log = logging.getLogger(__name__)
class TestCoinSelection:
@pytest.fixture(scope="function")
def a_hash(self) -> bytes32:
return std_hash(b"a")
def test_exact_match(self, a_hash: bytes32) -> None:
coin_list = [
Coin(a_hash, a_hash, uint64(220000)),
Coin(a_hash, a_hash, uint64(120000)),
Coin(a_hash, a_hash, uint64(22)),
]
assert check_for_exact_match(coin_list, uint64(220000)) == coin_list[0]
assert check_for_exact_match(coin_list, uint64(22)) == coin_list[2]
# check for no match.
assert check_for_exact_match(coin_list, uint64(20)) is None
def test_knapsack_coin_selection(self, a_hash: bytes32) -> None:
tries = 100
coins_to_append = 1000
amounts = list(range(1, coins_to_append))
amounts.sort(reverse=True)
coin_list: List[Coin] = [Coin(a_hash, a_hash, uint64(100000000 * a)) for a in amounts]
for i in range(tries):
knapsack = knapsack_coin_algorithm(
coin_list, uint128(30000000000000), DEFAULT_CONSTANTS.MAX_COIN_AMOUNT, 999999, seed=bytes([i])
)
assert knapsack is not None
assert sum([coin.amount for coin in knapsack]) >= 310000000
def test_knapsack_coin_selection_2(self, a_hash: bytes32) -> None:
coin_amounts = [6, 20, 40, 80, 150, 160, 203, 202, 201, 320]
coin_amounts.sort(reverse=True)
coin_list: List[Coin] = [Coin(a_hash, a_hash, uint64(a)) for a in coin_amounts]
# coin_list = set([coin for a in coin_amounts])
for i in range(100):
knapsack = knapsack_coin_algorithm(
coin_list, uint128(265), DEFAULT_CONSTANTS.MAX_COIN_AMOUNT, 99999, seed=bytes([i])
)
assert knapsack is not None
selected_sum = sum(coin.amount for coin in list(knapsack))
assert 265 <= selected_sum <= 281 # Selects a set of coins which does exceed by too much
@pytest.mark.asyncio
async def test_coin_selection_randomly(self, a_hash: bytes32) -> None:
coin_base_amounts = [3, 6, 20, 40, 80, 150, 160, 203, 202, 201, 320]
coin_amounts = []
spendable_amount = 0
# this is possibly overkill, but it's a good test.
for i in range(3000):
for amount in coin_base_amounts:
c_amount = randrange(1, 10000000) * amount
coin_amounts.append(c_amount)
spendable_amount += c_amount
spendable_amount = uint128(spendable_amount)
coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1)
for a in coin_amounts
]
for target_amount in coin_amounts[:100]: # select the first 100 values
result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
assert result is not None
assert sum([coin.amount for coin in result]) >= target_amount
assert len(result) <= 500
@pytest.mark.asyncio
async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None:
spendable_amount = uint128(5000000000000 + 10000)
coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(
Coin(a_hash, a_hash, uint64(5000000000000)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
]
for i in range(10000):
coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(1)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
# make sure coins are not identical.
for target_amount in [10000, 9999]:
print("Target amount: ", target_amount)
result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
assert result is not None
assert sum([coin.amount for coin in result]) >= target_amount
assert len(result) == 1 # only one coin should be selected
for i in range(100):
coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(2000)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
spendable_amount = uint128(spendable_amount + 2000 * 100)
for target_amount in [50000, 25000, 15000, 10000, 9000, 3000]: # select the first 100 values
dusty_result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
assert dusty_result is not None
assert sum([coin.amount for coin in dusty_result]) >= target_amount
for coin in dusty_result:
assert coin.amount > 1
assert len(dusty_result) <= 500
# test when we have multiple coins under target, and a lot of dust coins.
spendable_amount = uint128(25000 + 10000)
new_coin_list: List[WalletCoinRecord] = []
for i in range(5):
new_coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(5000)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
for i in range(10000):
new_coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(1)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
for target_amount in [20000, 15000, 10000, 5000]: # select the first 100 values
dusty_below_target: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
new_coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
assert dusty_below_target is not None
assert sum([coin.amount for coin in dusty_below_target]) >= target_amount
for coin in dusty_below_target:
assert coin.amount == 5000
assert len(dusty_below_target) <= 500
@pytest.mark.asyncio
async def test_dust_and_one_large_coin(self, a_hash: bytes32) -> None:
# test when we have a lot of dust and 1 large coin
spendable_amount = uint128(50000 + 10000)
new_coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(
Coin(a_hash, std_hash(b"123"), uint64(50000)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
]
for i in range(10000):
new_coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(1)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
for target_amount in [50000, 10001, 10000, 9999]:
dusty_below_target: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
new_coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
assert dusty_below_target is not None
assert sum([coin.amount for coin in dusty_below_target]) >= target_amount
assert len(dusty_below_target) <= 500
@pytest.mark.asyncio
async def test_coin_selection_failure(self, a_hash: bytes32) -> None:
spendable_amount = uint128(10000)
coin_list: List[WalletCoinRecord] = []
for i in range(10000):
coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(1)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
# make sure coins are not identical.
# test for failure
with pytest.raises(ValueError):
for target_amount in [10000, 9999]:
await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
# test not enough coin failure.
with pytest.raises(ValueError):
for target_amount in [10001, 20000]:
await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
@pytest.mark.asyncio
async def test_coin_selection(self, a_hash: bytes32) -> None:
coin_amounts = [3, 6, 20, 40, 80, 150, 160, 203, 202, 201, 320]
coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1)
for a in coin_amounts
]
spendable_amount = uint128(sum(coin_amounts))
# check for exact match
target_amount = uint128(40)
exact_match_result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert exact_match_result is not None
assert sum([coin.amount for coin in exact_match_result]) >= target_amount
assert len(exact_match_result) == 1
# check for match of 2
target_amount = uint128(153)
match_2: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert match_2 is not None
assert sum([coin.amount for coin in match_2]) == target_amount
assert len(match_2) == 2
# check for match of at least 3. it is random after all.
target_amount = uint128(541)
match_3: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert match_3 is not None
assert sum([coin.amount for coin in match_3]) >= target_amount
assert len(match_3) >= 3
# check for match of all
target_amount = spendable_amount
match_all: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert match_all is not None
assert sum([coin.amount for coin in match_all]) == target_amount
assert len(match_all) == len(coin_list)
# test smallest greater than target
greater_coin_amounts = [1, 2, 5, 20, 400, 700]
greater_coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1)
for a in greater_coin_amounts
]
greater_spendable_amount = uint128(sum(greater_coin_amounts))
target_amount = uint128(625)
smallest_result: Set[Coin] = await select_coins(
greater_spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
greater_coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert smallest_result is not None
assert sum([coin.amount for coin in smallest_result]) > target_amount
assert len(smallest_result) == 1
# test smallest greater than target with only 1 large coin.
single_greater_coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(70000)), uint32(1), uint32(1), False, True, WalletType(0), 1)
]
single_greater_spendable_amount = uint128(70000)
target_amount = uint128(50000)
single_greater_result: Set[Coin] = await select_coins(
single_greater_spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
single_greater_coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert single_greater_result is not None
assert sum([coin.amount for coin in single_greater_result]) > target_amount
assert len(single_greater_result) == 1
# test smallest greater than target with only multiple larger then target coins.
multiple_greater_coin_amounts = [90000, 100000, 120000, 200000, 100000]
multiple_greater_coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1)
for a in multiple_greater_coin_amounts
]
multiple_greater_spendable_amount = uint128(sum(multiple_greater_coin_amounts))
target_amount = uint128(70000)
multiple_greater_result: Set[Coin] = await select_coins(
multiple_greater_spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
multiple_greater_coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert multiple_greater_result is not None
assert sum([coin.amount for coin in multiple_greater_result]) > target_amount
assert sum([coin.amount for coin in multiple_greater_result]) == 90000
assert len(multiple_greater_result) == 1
@pytest.mark.asyncio
async def test_coin_selection_difficult(self, a_hash: bytes32) -> None:
num_coins = 40
spendable_amount = uint128(num_coins * 1000)
coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(
Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64(1000)),
uint32(1),
uint32(1),
False,
True,
WalletType(0),
1,
)
for i in range(num_coins)
]
target_amount = spendable_amount - 1
result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
)
assert result is not None
print(result)
print(sum([c.amount for c in result]))
assert sum([coin.amount for coin in result]) >= target_amount
@pytest.mark.asyncio
async def test_smallest_coin_over_amount(self, a_hash: bytes32) -> None:
coin_list: List[Coin] = [
Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64((39 - i) * 1000)) for i in range(40)
]
assert select_smallest_coin_over_target(uint128(100), coin_list) == coin_list[39 - 1]
assert select_smallest_coin_over_target(uint128(1000), coin_list) == coin_list[39 - 1]
assert select_smallest_coin_over_target(uint128(1001), coin_list) == coin_list[39 - 2]
assert select_smallest_coin_over_target(uint128(37000), coin_list) == coin_list[39 - 37]
assert select_smallest_coin_over_target(uint128(39000), coin_list) == coin_list[39 - 39]
assert select_smallest_coin_over_target(uint128(39001), coin_list) is None
@pytest.mark.asyncio
async def test_sum_largest_coins(self, a_hash: bytes32) -> None:
coin_list: List[Coin] = list(
reversed([Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64(i)) for i in range(41)])
)
assert sum_largest_coins(uint128(40), coin_list) == {coin_list[0]}
assert sum_largest_coins(uint128(79), coin_list) == {coin_list[0], coin_list[1]}
assert sum_largest_coins(uint128(40000), coin_list) is None
@pytest.mark.asyncio
async def test_knapsack_perf(self, a_hash: bytes32) -> None:
start = time.time()
coin_list: List[Coin] = [
Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64((200000 - i) * 1000)) for i in range(200000)
]
knapsack_coin_algorithm(coin_list, uint128(2000000), 9999999999999999, 500)
# Just a sanity check, it's actually much faster than this time
assert time.time() - start < 10000
@pytest.mark.asyncio
async def test_coin_selection_min_coin(self, a_hash: bytes32) -> None:
spendable_amount = uint128(5000000 + 500 + 40050)
coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(5000000)), uint32(1), uint32(1), False, True, WalletType(0), 1)
]
for i in range(500):
coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(1)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
for i in range(1, 90):
coin_list.append(
WalletCoinRecord(
Coin(a_hash, std_hash(i), uint64(i * 10)), uint32(1), uint32(1), False, True, WalletType(0), 1
)
)
# make sure coins are not identical.
for target_amount in [500, 1000, 50000, 500000]:
for min_coin_amount in [10, 100, 200, 300, 1000]:
result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
uint128(target_amount),
min_coin_amount=uint64(min_coin_amount),
)
assert result is not None # this should never happen
assert sum(coin.amount for coin in result) >= target_amount
for coin in result:
assert not coin.amount < min_coin_amount
assert len(result) <= 500
@pytest.mark.asyncio
async def test_coin_selection_with_excluded_coins(self) -> None:
a_hash = std_hash(b"a")
b_hash = std_hash(b"b")
c_hash = std_hash(b"c")
target_amount = uint128(2)
spendable_coins = [
Coin(a_hash, a_hash, uint64(3)),
Coin(b_hash, b_hash, uint64(6)),
Coin(c_hash, c_hash, uint64(9)),
]
spendable_amount = uint128(sum(coin.amount for coin in spendable_coins))
spendable_wallet_coin_records = [
WalletCoinRecord(spendable_coin, uint32(1), uint32(1), False, True, WalletType(0), 1)
for spendable_coin in spendable_coins
]
excluded_coins = [Coin(a_hash, a_hash, uint64(3)), Coin(c_hash, c_hash, uint64(9))]
# test that excluded coins are not included in the result
selected_coins: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
spendable_wallet_coin_records,
{},
logging.getLogger("test"),
amount=target_amount,
exclude=excluded_coins,
)
assert selected_coins is not None
assert sum([coin.amount for coin in selected_coins]) >= target_amount
assert len(selected_coins) == 1
assert list(selected_coins)[0] == Coin(b_hash, b_hash, uint64(6))
exclude_all_coins = spendable_coins
# make sure that a failure is raised if all coins are excluded.
with pytest.raises(ValueError):
await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
spendable_wallet_coin_records,
{},
logging.getLogger("test"),
amount=target_amount,
exclude=exclude_all_coins,
)
@pytest.mark.asyncio
async def test_coin_selection_with_zero_amount(self, a_hash: bytes32) -> None:
coin_amounts = [3, 6, 20, 40, 80, 150, 160, 203, 202, 201, 320]
coin_list: List[WalletCoinRecord] = [
WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1)
for a in coin_amounts
]
spendable_amount = uint128(sum(coin_amounts))
# validate that a zero amount is handled correctly
target_amount = uint128(0)
zero_amount_result: Set[Coin] = await select_coins(
spendable_amount,
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
coin_list,
{},
logging.getLogger("test"),
target_amount,
)
assert zero_amount_result is not None
assert sum([coin.amount for coin in zero_amount_result]) >= target_amount
assert len(zero_amount_result) == 1
# make sure that a failure is properly raised if we don't have any coins.
with pytest.raises(ValueError):
await select_coins(
uint128(0),
uint64(DEFAULT_CONSTANTS.MAX_COIN_AMOUNT),
[],
{},
logging.getLogger("test"),
target_amount,
)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/__init__.py | tests/wallet/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_notifications.py | tests/wallet/test_notifications.py | from __future__ import annotations
from secrets import token_bytes
from typing import Any
import pytest
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.ints import uint16, uint32, uint64
# from clvm_tools.binutils import disassemble
from tests.util.wallet_is_synced import wallets_are_synced
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
# @pytest.mark.skip
async def test_notifications(two_wallet_nodes: Any, trusted: Any) -> None:
full_nodes, wallets, _ = two_wallet_nodes
full_node_api: FullNodeSimulator = full_nodes[0]
full_node_server = full_node_api.server
wallet_node_1, server_0 = wallets[0]
wallet_node_2, server_1 = wallets[1]
wsm_1 = wallet_node_1.wallet_state_manager
wsm_2 = wallet_node_2.wallet_state_manager
wallet_1 = wsm_1.main_wallet
wallet_2 = wsm_2.main_wallet
ph_1 = await wallet_1.get_new_puzzlehash()
ph_2 = await wallet_2.get_new_puzzlehash()
ph_token = bytes32(token_bytes())
if trusted:
wallet_node_1.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
wallet_node_2.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
else:
wallet_node_1.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
for i in range(0, 1):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_1))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
await time_out_assert(30, wallets_are_synced, True, [wallet_node_1, wallet_node_2], full_node_api)
funds_1 = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
funds_2 = 0
await time_out_assert(30, wallet_1.get_unconfirmed_balance, funds_1)
await time_out_assert(30, wallet_1.get_confirmed_balance, funds_1)
notification_manager_1 = wsm_1.notification_manager
notification_manager_2 = wsm_2.notification_manager
for case in ("block all", "block too low", "allow", "allow_larger"):
if case == "block all":
wallet_node_2.config["required_notification_amount"] = 100
AMOUNT = uint64(100)
FEE = uint64(0)
elif case == "block too low":
wallet_node_2.config["accept_notifications"] = True
AMOUNT = uint64(1)
FEE = uint64(0)
elif case in ("allow", "allow_larger"):
wallet_node_2.config["required_notification_amount"] = 750000000000
if case == "allow_larger":
AMOUNT = uint64(1000000000000)
else:
AMOUNT = uint64(750000000000)
FEE = uint64(1)
tx = await notification_manager_1.send_new_notification(ph_2, bytes(case, "utf8"), AMOUNT, fee=FEE)
await wsm_1.add_pending_transaction(tx)
await time_out_assert_not_none(
5,
full_node_api.full_node.mempool_manager.get_spendbundle,
tx.spend_bundle.name(),
)
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
funds_1 = funds_1 - AMOUNT - FEE
funds_2 += AMOUNT
await time_out_assert(30, wallet_1.get_unconfirmed_balance, funds_1)
await time_out_assert(30, wallet_1.get_confirmed_balance, funds_1)
await time_out_assert(30, wallet_2.get_unconfirmed_balance, funds_2)
await time_out_assert(30, wallet_2.get_confirmed_balance, funds_2)
notifications = await notification_manager_2.notification_store.get_all_notifications(pagination=(0, 2))
assert len(notifications) == 2
assert notifications[0].message == b"allow_larger"
notifications = await notification_manager_2.notification_store.get_all_notifications(pagination=(1, None))
assert len(notifications) == 1
assert notifications[0].message == b"allow"
notifications = await notification_manager_2.notification_store.get_all_notifications(pagination=(0, 1))
assert len(notifications) == 1
assert notifications[0].message == b"allow_larger"
notifications = await notification_manager_2.notification_store.get_all_notifications(pagination=(None, 1))
assert len(notifications) == 1
assert notifications[0].message == b"allow_larger"
assert (
await notification_manager_2.notification_store.get_notifications([n.coin_id for n in notifications])
== notifications
)
await notification_manager_2.notification_store.delete_all_notifications()
assert len(await notification_manager_2.notification_store.get_all_notifications()) == 0
await notification_manager_2.notification_store.add_notification(notifications[0])
await notification_manager_2.notification_store.delete_notifications([n.coin_id for n in notifications])
assert len(await notification_manager_2.notification_store.get_all_notifications()) == 0
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_address_type.py | tests/wallet/test_address_type.py | from __future__ import annotations
from typing import Any, Dict
import pytest
from flax.wallet.util.address_type import AddressType, ensure_valid_address, is_valid_address
@pytest.mark.parametrize("prefix", [None])
def test_xfx_hrp_for_default_config(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
assert AddressType.XFX.hrp(config) == "xfx"
@pytest.mark.parametrize("prefix", ["txfx"])
def test_txfx_hrp_for_testnet(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
assert AddressType.XFX.hrp(config) == "txfx"
@pytest.mark.parametrize("prefix", [None])
def test_is_valid_address_xfx(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"xfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd", allowed_types={AddressType.XFX}, config=config
)
assert valid is True
@pytest.mark.parametrize("prefix", ["txfx"])
def test_is_valid_address_txfx(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
# TXFX address validation requires a config
valid = is_valid_address(
"txfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs2v6lg7",
allowed_types={AddressType.XFX},
config=config,
)
assert valid is True
@pytest.mark.parametrize("prefix", [None])
def test_is_valid_address_xfx_bad_address(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"xfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8xxxxx", allowed_types={AddressType.XFX}, config=config
)
assert valid is False
@pytest.mark.parametrize("prefix", [None])
def test_is_valid_address_nft(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773", allowed_types={AddressType.NFT}, config=config
)
assert valid is True
@pytest.mark.parametrize("prefix", ["txfx"])
def test_is_valid_address_nft_with_testnet(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773", allowed_types={AddressType.NFT}, config=config
)
assert valid is True
@pytest.mark.parametrize("prefix", [None])
def test_is_valid_address_nft_bad_address(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtxxxxx", allowed_types={AddressType.NFT}, config=config
)
assert valid is False
@pytest.mark.parametrize("prefix", [None])
def test_is_valid_address_did(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"did:flax:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7",
allowed_types={AddressType.DID},
config=config,
)
assert valid is True
@pytest.mark.parametrize("prefix", ["txfx"])
def test_is_valid_address_did_with_testnet(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"did:flax:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7",
allowed_types={AddressType.DID},
config=config,
)
assert valid is True
@pytest.mark.parametrize("prefix", [None])
def test_is_valid_address_did_bad_address(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
valid = is_valid_address(
"did:flax:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsrxxxxx",
allowed_types={AddressType.DID},
config=config,
)
assert valid is False
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_xfx(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
address = ensure_valid_address(
"xfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd", allowed_types={AddressType.XFX}, config=config
)
assert address == "xfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd"
@pytest.mark.parametrize("prefix", ["txfx"])
def test_ensure_valid_address_txfx(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
address = ensure_valid_address(
"txfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs2v6lg7",
allowed_types={AddressType.XFX},
config=config,
)
assert address == "txfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs2v6lg7"
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_xfx_bad_address(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
with pytest.raises(ValueError):
ensure_valid_address(
"xfx1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8xxxxx",
allowed_types={AddressType.XFX},
config=config,
)
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_nft(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
address = ensure_valid_address(
"nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773", allowed_types={AddressType.NFT}, config=config
)
assert address == "nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773"
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_nft_bad_address(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
with pytest.raises(ValueError):
ensure_valid_address(
"nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtxxxxx",
allowed_types={AddressType.NFT},
config=config,
)
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_did(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
address = ensure_valid_address(
"did:flax:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7",
allowed_types={AddressType.DID},
config=config,
)
assert address == "did:flax:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7"
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_did_bad_address(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
with pytest.raises(ValueError):
ensure_valid_address(
"did:flax:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsrxxxxx",
allowed_types={AddressType.DID},
config=config,
)
@pytest.mark.parametrize("prefix", [None])
def test_ensure_valid_address_bad_length(config_with_address_prefix: Dict[str, Any]) -> None:
config = config_with_address_prefix
with pytest.raises(ValueError):
ensure_valid_address("xfx1qqqqqqqqqqqqqqqqwygzk5", allowed_types={AddressType.XFX}, config=config)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_puzzle_store.py | tests/wallet/test_puzzle_store.py | from __future__ import annotations
from secrets import token_bytes
import pytest
from blspy import AugSchemeMPL
from flax.util.ints import uint32
from flax.wallet.derivation_record import DerivationRecord
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_puzzle_store import WalletPuzzleStore
from tests.util.db_connection import DBConnection
class TestPuzzleStore:
@pytest.mark.asyncio
async def test_puzzle_store(self):
async with DBConnection(1) as wrapper:
db = await WalletPuzzleStore.create(wrapper)
derivation_recs = []
# wallet_types = [t for t in WalletType]
[t for t in WalletType]
for i in range(1000):
derivation_recs.append(
DerivationRecord(
uint32(i),
token_bytes(32),
AugSchemeMPL.key_gen(token_bytes(32)).get_g1(),
WalletType.STANDARD_WALLET,
uint32(1),
False,
)
)
derivation_recs.append(
DerivationRecord(
uint32(i),
token_bytes(32),
AugSchemeMPL.key_gen(token_bytes(32)).get_g1(),
WalletType.CAT,
uint32(2),
False,
)
)
assert await db.puzzle_hash_exists(derivation_recs[0].puzzle_hash) is False
assert await db.index_for_pubkey(derivation_recs[0].pubkey) is None
assert await db.index_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
assert await db.wallet_info_for_puzzle_hash(derivation_recs[2].puzzle_hash) is None
assert len((await db.get_all_puzzle_hashes())) == 0
assert await db.get_last_derivation_path() is None
assert await db.get_unused_derivation_path() is None
assert await db.get_derivation_record(0, 2, False) is None
await db.add_derivation_paths(derivation_recs)
assert await db.puzzle_hash_exists(derivation_recs[0].puzzle_hash) is True
assert await db.index_for_pubkey(derivation_recs[4].pubkey) == 2
assert await db.index_for_puzzle_hash(derivation_recs[2].puzzle_hash) == 1
assert await db.wallet_info_for_puzzle_hash(derivation_recs[2].puzzle_hash) == (
derivation_recs[2].wallet_id,
derivation_recs[2].wallet_type,
)
assert len((await db.get_all_puzzle_hashes())) == 2000
assert await db.get_last_derivation_path() == 999
assert await db.get_unused_derivation_path() == 0
assert await db.get_derivation_record(0, 2, False) == derivation_recs[1]
# Indeces up to 250
await db.set_used_up_to(249)
assert await db.get_unused_derivation_path() == 250
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_user_store.py | tests/wallet/test_wallet_user_store.py | from __future__ import annotations
import pytest
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_user_store import WalletUserStore
from tests.util.db_connection import DBConnection
@pytest.mark.asyncio
async def test_store():
async with DBConnection(1) as db_wrapper:
store = await WalletUserStore.create(db_wrapper)
await store.init_wallet()
wallet = None
for i in range(1, 5):
assert (await store.get_last_wallet()).id == i
wallet = await store.create_wallet("CAT_WALLET", WalletType.CAT, "abc")
assert wallet.id == i + 1
assert wallet.id == 5
for i in range(2, 6):
await store.delete_wallet(i)
assert (await store.get_last_wallet()).id == 1
wallet = await store.create_wallet("CAT_WALLET", WalletType.CAT, "abc")
# Due to autoincrement, we don't reuse IDs
assert (await store.get_last_wallet()).id == 6
assert wallet.id == 6
assert (await store.get_wallet_by_id(7)) is None
assert (await store.get_wallet_by_id(6)) == wallet
assert await store.get_last_wallet() == wallet
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_blockchain.py | tests/wallet/test_wallet_blockchain.py | import dataclasses
import pytest
from flax.consensus.blockchain import ReceiveBlockResult
from flax.protocols import full_node_protocol
from flax.types.blockchain_format.vdf import VDFProof
from flax.types.weight_proof import WeightProof
from flax.util.generator_tools import get_block_header
from flax.wallet.key_val_store import KeyValStore
from flax.wallet.wallet_blockchain import WalletBlockchain
from tests.setup_nodes import test_constants
from tests.util.db_connection import DBConnection
class TestWalletBlockchain:
@pytest.mark.asyncio
async def test_wallet_blockchain(self, wallet_node, default_1000_blocks):
full_node_api, wallet_node, full_node_server, wallet_server, _ = wallet_node
for block in default_1000_blocks[:600]:
await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
res = await full_node_api.request_proof_of_weight(
full_node_protocol.RequestProofOfWeight(
default_1000_blocks[499].height + 1, default_1000_blocks[499].header_hash
)
)
res_2 = await full_node_api.request_proof_of_weight(
full_node_protocol.RequestProofOfWeight(
default_1000_blocks[460].height + 1, default_1000_blocks[460].header_hash
)
)
res_3 = await full_node_api.request_proof_of_weight(
full_node_protocol.RequestProofOfWeight(
default_1000_blocks[505].height + 1, default_1000_blocks[505].header_hash
)
)
weight_proof: WeightProof = full_node_protocol.RespondProofOfWeight.from_bytes(res.data).wp
success, _, records = await wallet_node._weight_proof_handler.validate_weight_proof(weight_proof, True)
weight_proof_short: WeightProof = full_node_protocol.RespondProofOfWeight.from_bytes(res_2.data).wp
success, _, records_short = await wallet_node._weight_proof_handler.validate_weight_proof(
weight_proof_short, True
)
weight_proof_long: WeightProof = full_node_protocol.RespondProofOfWeight.from_bytes(res_3.data).wp
success, _, records_long = await wallet_node._weight_proof_handler.validate_weight_proof(
weight_proof_long, True
)
async with DBConnection(1) as db_wrapper:
store = await KeyValStore.create(db_wrapper)
chain = await WalletBlockchain.create(store, test_constants)
assert (await chain.get_peak_block()) is None
assert chain.get_latest_timestamp() == 0
await chain.new_valid_weight_proof(weight_proof, records)
assert (await chain.get_peak_block()) is not None
assert (await chain.get_peak_block()).height == 499
assert chain.get_latest_timestamp() > 0
await chain.new_valid_weight_proof(weight_proof_short, records_short)
assert (await chain.get_peak_block()).height == 499
await chain.new_valid_weight_proof(weight_proof_long, records_long)
assert (await chain.get_peak_block()).height == 505
header_blocks = []
for block in default_1000_blocks:
header_block = get_block_header(block, [], [])
header_blocks.append(header_block)
res, err = await chain.receive_block(header_blocks[50])
print(res, err)
assert res == ReceiveBlockResult.DISCONNECTED_BLOCK
res, err = await chain.receive_block(header_blocks[400])
print(res, err)
assert res == ReceiveBlockResult.ALREADY_HAVE_BLOCK
res, err = await chain.receive_block(header_blocks[507])
print(res, err)
assert res == ReceiveBlockResult.DISCONNECTED_BLOCK
res, err = await chain.receive_block(
dataclasses.replace(header_blocks[506], challenge_chain_ip_proof=VDFProof(2, b"123", True))
)
assert res == ReceiveBlockResult.INVALID_BLOCK
assert (await chain.get_peak_block()).height == 505
for block in header_blocks[506:]:
res, err = await chain.receive_block(block)
assert res == ReceiveBlockResult.NEW_PEAK
assert (await chain.get_peak_block()).height == block.height
assert (await chain.get_peak_block()).height == 999
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_singleton.py | tests/wallet/test_singleton.py | from clvm_tools import binutils
from flax.types.blockchain_format.program import Program, INFINITE_COST
from flax.types.announcement import Announcement
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.condition_tools import parse_sexp_to_conditions
from flax.wallet.puzzles.load_clvm import load_clvm
SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
P2_SINGLETON_MOD = load_clvm("p2_singleton.clvm")
POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
LAUNCHER_ID = Program.to(b"launcher-id").get_tree_hash()
POOL_REWARD_PREFIX_MAINNET = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000")
def singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> Program:
return SINGLETON_MOD.curry((SINGLETON_MOD_HASH, (launcher_id, launcher_puzzle_hash)), inner_puzzle)
def p2_singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32) -> Program:
return P2_SINGLETON_MOD.curry(SINGLETON_MOD_HASH, launcher_id, launcher_puzzle_hash)
def singleton_puzzle_hash(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> bytes32:
return singleton_puzzle(launcher_id, launcher_puzzle_hash, inner_puzzle).get_tree_hash()
def p2_singleton_puzzle_hash(launcher_id: Program, launcher_puzzle_hash: bytes32) -> bytes32:
return p2_singleton_puzzle(launcher_id, launcher_puzzle_hash).get_tree_hash()
def test_only_odd_coins():
singleton_mod_hash = SINGLETON_MOD.get_tree_hash()
# (SINGLETON_STRUCT INNER_PUZZLE lineage_proof my_amount inner_solution)
# SINGLETON_STRUCT = (MOD_HASH . (LAUNCHER_ID . LAUNCHER_PUZZLE_HASH))
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 200))")),
[0xDEADBEEF, 0xCAFEF00D, 200],
200,
[],
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception as e:
assert e.args == ("clvm raise", "80")
else:
assert False
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 201))")),
[0xDEADBEEF, 0xCAFED00D, 210],
205,
0,
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception:
assert False
def test_only_one_odd_coin_created():
singleton_mod_hash = SINGLETON_MOD.get_tree_hash()
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 203) (51 0xfadeddab 205))")),
[0xDEADBEEF, 0xCAFEF00D, 411],
411,
[],
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception as e:
assert e.args == ("clvm raise", "80")
else:
assert False
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 203) (51 0xfadeddab 204) (51 0xdeadbeef 202))")),
[0xDEADBEEF, 0xCAFEF00D, 411],
411,
[],
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception:
assert False
def test_p2_singleton():
# create a singleton. This should call driver code.
launcher_id = LAUNCHER_ID
innerpuz = Program.to(1)
singleton_full_puzzle = singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH, innerpuz)
# create a fake coin id for the `p2_singleton`
p2_singleton_coin_id = Program.to(["test_hash"]).get_tree_hash()
expected_announcement = Announcement(singleton_full_puzzle.get_tree_hash(), p2_singleton_coin_id).name()
# create a `p2_singleton` puzzle. This should call driver code.
p2_singleton_full = p2_singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH)
solution = Program.to([innerpuz.get_tree_hash(), p2_singleton_coin_id])
cost, result = p2_singleton_full.run_with_cost(INFINITE_COST, solution)
err, conditions = parse_sexp_to_conditions(result)
assert err is None
p2_singleton_full = p2_singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH)
solution = Program.to([innerpuz.get_tree_hash(), p2_singleton_coin_id])
cost, result = p2_singleton_full.run_with_cost(INFINITE_COST, solution)
assert result.first().rest().first().as_atom() == expected_announcement
assert conditions[0].vars[0] == expected_announcement
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_nft_store.py | tests/wallet/test_nft_store.py | from __future__ import annotations
import pytest
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint32, uint64
from flax.wallet.lineage_proof import LineageProof
from flax.wallet.nft_wallet.nft_info import NFTCoinInfo
from flax.wallet.wallet_nft_store import WalletNftStore
from tests.util.db_connection import DBConnection
class TestNftStore:
@pytest.mark.asyncio
async def test_nft_insert(self) -> None:
async with DBConnection(1) as wrapper:
db = await WalletNftStore.create(wrapper)
a_bytes32 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f1")
b_bytes32 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f2")
puzzle = Program.to(["A Test puzzle"])
nft = NFTCoinInfo(
a_bytes32,
Coin(a_bytes32, a_bytes32, uint64(1)),
LineageProof(a_bytes32, a_bytes32, uint64(1)),
puzzle,
uint32(1),
None,
uint32(10),
)
nft2 = NFTCoinInfo(
b_bytes32,
Coin(b_bytes32, b_bytes32, uint64(1)),
LineageProof(a_bytes32, a_bytes32, uint64(1)),
puzzle,
uint32(1),
None,
uint32(10),
)
# Test save
await db.save_nft(uint32(1), a_bytes32, nft)
await db.save_nft(uint32(1), a_bytes32, nft) # test for duplicates
await db.save_nft(uint32(1), b_bytes32, nft2)
# Test get nft
assert await db.count() == 2
assert nft == (await db.get_nft_list(wallet_id=uint32(1)))[0]
assert nft == (await db.get_nft_list())[0]
assert nft == (await db.get_nft_list(did_id=a_bytes32))[0]
assert nft == (await db.get_nft_list(wallet_id=uint32(1), did_id=a_bytes32))[0]
assert nft == await db.get_nft_by_id(a_bytes32)
assert nft == (await db.get_nft_by_coin_id(nft.coin.name()))
assert await db.exists(nft.coin.name())
# negative tests
assert (await db.get_nft_by_coin_id(bytes32(b"0" * 32))) is None
assert not await db.exists(bytes32(b"0" * 32))
@pytest.mark.asyncio
async def test_nft_remove(self) -> None:
async with DBConnection(1) as wrapper:
db = await WalletNftStore.create(wrapper)
a_bytes32 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f1")
puzzle = Program.to(["A Test puzzle"])
coin = Coin(a_bytes32, a_bytes32, uint64(1))
nft = NFTCoinInfo(
a_bytes32,
coin,
LineageProof(a_bytes32, a_bytes32, uint64(1)),
puzzle,
uint32(1),
a_bytes32,
uint32(10),
)
# Test save
await db.save_nft(uint32(1), a_bytes32, nft)
# Test delete by nft id
await db.delete_nft_by_nft_id(a_bytes32, uint32(11))
assert await db.get_nft_by_id(a_bytes32) is None
# Test delete by coin id
await db.save_nft(uint32(1), a_bytes32, nft)
assert not await db.delete_nft_by_coin_id(a_bytes32, uint32(11))
assert await db.delete_nft_by_coin_id(coin.name(), uint32(11))
assert not await db.delete_nft_by_coin_id(a_bytes32, uint32(11))
assert not await db.exists(a_bytes32)
@pytest.mark.asyncio
async def test_nft_reorg(self) -> None:
async with DBConnection(1) as wrapper:
db = await WalletNftStore.create(wrapper)
a_bytes32 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f0")
nft_id_1 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f1")
coin_id_1 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f2")
nft_id_2 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f3")
coin_id_2 = bytes32.fromhex("09287c75377c63fd6a3a4d6658abed03e9a521e0436b1f83cdf4af99341ce8f4")
puzzle = Program.to(["A Test puzzle"])
nft = NFTCoinInfo(
nft_id_1,
Coin(coin_id_1, coin_id_1, uint64(1)),
LineageProof(coin_id_1, coin_id_1, uint64(1)),
puzzle,
uint32(1),
a_bytes32,
uint32(10),
)
# Test save
await db.save_nft(uint32(1), nft_id_1, nft)
# Test delete
await db.delete_nft_by_nft_id(nft_id_1, uint32(11))
assert await db.get_nft_by_id(nft_id_1) is None
# Test reorg
nft1 = NFTCoinInfo(
nft_id_2,
Coin(coin_id_2, coin_id_2, uint64(1)),
LineageProof(coin_id_2, coin_id_2, uint64(1)),
puzzle,
uint32(1),
a_bytes32,
uint32(12),
)
await db.save_nft(uint32(1), nft_id_1, nft1)
assert nft1 == (await db.get_nft_list(wallet_id=uint32(1)))[0]
assert await db.rollback_to_block(10)
assert nft == (await db.get_nft_list(wallet_id=uint32(1)))[0]
assert not (await db.get_nft_by_coin_id(coin_id_2))
assert not (await db.get_nft_by_coin_id(nft_id_1))
assert not await db.exists(coin_id_2)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_wallet_coin_store.py | tests/wallet/test_wallet_coin_store.py | from __future__ import annotations
from secrets import token_bytes
import pytest
from flax.types.blockchain_format.coin import Coin
from flax.util.ints import uint32, uint64
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_coin_record import WalletCoinRecord
from flax.wallet.wallet_coin_store import WalletCoinStore
from tests.util.db_connection import DBConnection
coin_1 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_2 = Coin(coin_1.parent_coin_info, token_bytes(32), uint64(12311))
coin_3 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_4 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_5 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
coin_6 = Coin(token_bytes(32), coin_4.puzzle_hash, uint64(12312))
coin_7 = Coin(token_bytes(32), token_bytes(32), uint64(12312))
record_replaced = WalletCoinRecord(coin_1, uint32(8), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_1 = WalletCoinRecord(coin_1, uint32(4), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_2 = WalletCoinRecord(coin_2, uint32(5), uint32(0), False, True, WalletType.STANDARD_WALLET, 0)
record_3 = WalletCoinRecord(
coin_3,
uint32(5),
uint32(10),
True,
False,
WalletType.STANDARD_WALLET,
0,
)
record_4 = WalletCoinRecord(
coin_4,
uint32(5),
uint32(15),
True,
False,
WalletType.STANDARD_WALLET,
0,
)
record_5 = WalletCoinRecord(
coin_5,
uint32(5),
uint32(0),
False,
False,
WalletType.STANDARD_WALLET,
1,
)
record_6 = WalletCoinRecord(
coin_6,
uint32(5),
uint32(15),
True,
False,
WalletType.STANDARD_WALLET,
2,
)
record_7 = WalletCoinRecord(
coin_7,
uint32(5),
uint32(0),
False,
False,
WalletType.POOLING_WALLET,
2,
)
@pytest.mark.asyncio
async def test_add_replace_get() -> None:
async with DBConnection(1) as db_wrapper:
store = await WalletCoinStore.create(db_wrapper)
assert await store.get_coin_record(coin_1.name()) is None
await store.add_coin_record(record_1)
# adding duplicates is fine, we replace existing entry
await store.add_coin_record(record_replaced)
await store.add_coin_record(record_2)
await store.add_coin_record(record_3)
await store.add_coin_record(record_4)
assert await store.get_coin_record(coin_1.name()) == record_replaced
@pytest.mark.asyncio
async def test_persistance() -> None:
async with DBConnection(1) as db_wrapper:
store = await WalletCoinStore.create(db_wrapper)
await store.add_coin_record(record_1)
store = await WalletCoinStore.create(db_wrapper)
assert await store.get_coin_record(coin_1.name()) == record_1
@pytest.mark.asyncio
async def test_bulk_get() -> None:
    """get_coin_records returns results positionally, with None for unknown names."""
    async with DBConnection(1) as wrapper:
        store = await WalletCoinStore.create(wrapper)
        for rec in (record_1, record_2, record_3, record_4):
            await store.add_coin_record(rec)
        # Re-open the store over the same DB before querying.
        store = await WalletCoinStore.create(wrapper)
        unknown_name = token_bytes(32)
        records = await store.get_coin_records([coin_1.name(), coin_2.name(), unknown_name, coin_4.name()])
        assert records == [record_1, record_2, None, record_4]
@pytest.mark.asyncio
async def test_set_spent() -> None:
    """set_spent flips the spent flag and records the spend height."""
    async with DBConnection(1) as wrapper:
        store = await WalletCoinStore.create(wrapper)
        await store.add_coin_record(record_1)

        before = await store.get_coin_record(coin_1.name())
        assert not before.spent

        await store.set_spent(coin_1.name(), uint32(12))

        after = await store.get_coin_record(coin_1.name())
        assert after.spent
        assert after.spent_block_height == 12
@pytest.mark.asyncio
async def test_get_records_by_puzzle_hash() -> None:
    """Puzzle-hash lookup returns every record sharing that hash."""
    async with DBConnection(1) as wrapper:
        store = await WalletCoinStore.create(wrapper)
        await store.add_coin_record(record_4)
        await store.add_coin_record(record_5)
        # Adding a duplicate is fine; the existing entry is replaced.
        await store.add_coin_record(record_5)
        await store.add_coin_record(record_6)

        # coin_6 was built with coin_4's puzzle hash, so both records match.
        matches = await store.get_coin_records_by_puzzle_hash(record_6.coin.puzzle_hash)
        assert len(matches) == 2
        assert len(await store.get_coin_records_by_puzzle_hash(token_bytes(32))) == 0

        assert await store.get_coin_record(coin_6.name()) == record_6
        assert await store.get_coin_record(token_bytes(32)) is None
@pytest.mark.asyncio
async def test_get_unspent_coins_for_wallet() -> None:
    """Unspent lookups are filtered by wallet id and react to set_spent().

    Idiom fix: `set([...])` literals replaced with `{...}` set displays.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        assert await store.get_unspent_coins_for_wallet(1) == set()

        await store.add_coin_record(record_4)  # this is spent and wallet 0
        await store.add_coin_record(record_5)  # wallet 1
        await store.add_coin_record(record_6)  # this is spent and wallet 2
        await store.add_coin_record(record_7)  # wallet 2

        assert await store.get_unspent_coins_for_wallet(1) == {record_5}
        assert await store.get_unspent_coins_for_wallet(2) == {record_7}
        assert await store.get_unspent_coins_for_wallet(3) == set()

        # record_4 is already spent, so this must not change any result.
        await store.set_spent(coin_4.name(), uint32(12))

        assert await store.get_unspent_coins_for_wallet(1) == {record_5}
        assert await store.get_unspent_coins_for_wallet(2) == {record_7}
        assert await store.get_unspent_coins_for_wallet(3) == set()

        await store.set_spent(coin_7.name(), uint32(12))

        assert await store.get_unspent_coins_for_wallet(1) == {record_5}
        assert await store.get_unspent_coins_for_wallet(2) == set()
        assert await store.get_unspent_coins_for_wallet(3) == set()

        await store.set_spent(coin_5.name(), uint32(12))

        assert await store.get_unspent_coins_for_wallet(1) == set()
        assert await store.get_unspent_coins_for_wallet(2) == set()
        assert await store.get_unspent_coins_for_wallet(3) == set()
@pytest.mark.asyncio
async def test_get_all_unspent_coins() -> None:
    """get_all_unspent_coins returns every unspent record across all wallets.

    Idiom fix: `set([...])` literals replaced with `{...}` set displays.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        assert await store.get_all_unspent_coins() == set()

        await store.add_coin_record(record_1)  # not spent
        await store.add_coin_record(record_2)  # not spent
        await store.add_coin_record(record_3)  # spent
        assert await store.get_all_unspent_coins() == {record_1, record_2}

        await store.add_coin_record(record_4)  # spent
        await store.add_coin_record(record_5)  # not spent
        await store.add_coin_record(record_6)  # spent
        assert await store.get_all_unspent_coins() == {record_1, record_2, record_5}

        await store.add_coin_record(record_7)  # not spent
        assert await store.get_all_unspent_coins() == {record_1, record_2, record_5, record_7}

        # record_4 is already spent, so this is a no-op for the result set.
        await store.set_spent(coin_4.name(), uint32(12))
        assert await store.get_all_unspent_coins() == {record_1, record_2, record_5, record_7}

        await store.set_spent(coin_7.name(), uint32(12))
        assert await store.get_all_unspent_coins() == {record_1, record_2, record_5}

        await store.set_spent(coin_5.name(), uint32(12))
        assert await store.get_all_unspent_coins() == {record_1, record_2}

        await store.set_spent(coin_2.name(), uint32(12))
        await store.set_spent(coin_1.name(), uint32(12))
        assert await store.get_all_unspent_coins() == set()
@pytest.mark.asyncio
async def test_get_records_by_parent_id() -> None:
    """Parent-id lookup returns all records descending from that parent.

    Idiom fix: `set([...])` replaced with `{...}`; repetitive adds folded
    into a loop.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        for rec in (record_1, record_2, record_3, record_4, record_5, record_6, record_7):
            await store.add_coin_record(rec)

        # coin_1 and coin_2 share a parent, so either parent id finds both.
        assert set(await store.get_coin_records_by_parent_id(coin_1.parent_coin_info)) == {record_1, record_2}
        assert set(await store.get_coin_records_by_parent_id(coin_2.parent_coin_info)) == {record_1, record_2}

        # The remaining coins each have a unique (random) parent.
        assert await store.get_coin_records_by_parent_id(coin_3.parent_coin_info) == [record_3]
        assert await store.get_coin_records_by_parent_id(coin_4.parent_coin_info) == [record_4]
        assert await store.get_coin_records_by_parent_id(coin_5.parent_coin_info) == [record_5]
        assert await store.get_coin_records_by_parent_id(coin_6.parent_coin_info) == [record_6]
        assert await store.get_coin_records_by_parent_id(coin_7.parent_coin_info) == [record_7]
@pytest.mark.asyncio
async def test_get_multiple_coin_records() -> None:
    """get_multiple_coin_records returns the record for every requested name.

    Idiom fix: `set([...])` replaced with `{...}`; name lists built with a
    comprehension instead of seven hand-written `.name()` calls.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        all_records = (record_1, record_2, record_3, record_4, record_5, record_6, record_7)
        for rec in all_records:
            await store.add_coin_record(rec)

        assert set(await store.get_multiple_coin_records([coin_1.name(), coin_2.name(), coin_3.name()])) == {
            record_1,
            record_2,
            record_3,
        }
        assert set(await store.get_multiple_coin_records([coin_5.name(), coin_6.name(), coin_7.name()])) == {
            record_5,
            record_6,
            record_7,
        }
        all_names = [c.name() for c in (coin_1, coin_2, coin_3, coin_4, coin_5, coin_6, coin_7)]
        assert set(await store.get_multiple_coin_records(all_names)) == set(all_records)
@pytest.mark.asyncio
async def test_delete_coin_record() -> None:
    """Deleting one coin's record removes it without touching the others.

    Idiom fix: `set([...])` replaced with `{...}`; repetitive adds and name
    lists folded into loops/comprehensions.
    """
    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        all_records = (record_1, record_2, record_3, record_4, record_5, record_6, record_7)
        for rec in all_records:
            await store.add_coin_record(rec)

        all_names = [c.name() for c in (coin_1, coin_2, coin_3, coin_4, coin_5, coin_6, coin_7)]
        assert set(await store.get_multiple_coin_records(all_names)) == set(all_records)
        assert await store.get_coin_record(coin_1.name()) == record_1

        await store.delete_coin_record(coin_1.name())

        assert await store.get_coin_record(coin_1.name()) is None
        remaining_names = [c.name() for c in (coin_2, coin_3, coin_4, coin_5, coin_6, coin_7)]
        assert set(await store.get_multiple_coin_records(remaining_names)) == {
            record_2,
            record_3,
            record_4,
            record_5,
            record_6,
            record_7,
        }
def record(c: Coin, *, confirmed: int, spent: int) -> WalletCoinRecord:
    """Build a standard-wallet coin record; a spent height of 0 means unspent."""
    is_spent = spent != 0
    return WalletCoinRecord(
        c,
        uint32(confirmed),
        uint32(spent),
        is_spent,
        False,
        WalletType.STANDARD_WALLET,
        0,
    )
@pytest.mark.asyncio
async def test_get_coin_names_to_check() -> None:
    """Coins spent at or below the queried height drop out of the check set."""
    r1 = record(coin_1, confirmed=1, spent=0)
    r2 = record(coin_2, confirmed=2, spent=4)
    r3 = record(coin_3, confirmed=3, spent=5)
    r4 = record(coin_4, confirmed=4, spent=6)
    r5 = record(coin_5, confirmed=5, spent=7)
    # these spent heights violate the invariant (spent below confirmed)
    r6 = record(coin_6, confirmed=6, spent=1)
    r7 = record(coin_7, confirmed=7, spent=2)

    # Height below which each record must still be returned; None means the
    # record is unspent and must always be returned.  For the
    # invariant-violating r6/r7 the confirmed height (6 and 7) applies.
    cutoffs = [(r1, None), (r2, 4), (r3, 5), (r4, 6), (r5, 7), (r6, 6), (r7, 7)]

    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        for rec, _ in cutoffs:
            await store.add_coin_record(rec)

        for height in range(10):
            names = await store.get_coin_names_to_check(height)
            for rec, cutoff in cutoffs:
                expected = cutoff is None or height < cutoff
                assert (rec.coin.name() in names) == expected
@pytest.mark.asyncio
async def test_get_first_coin_height() -> None:
    """get_first_coin_height tracks the lowest confirmed height in the store."""
    r1 = record(coin_1, confirmed=1, spent=0)
    r2 = record(coin_2, confirmed=2, spent=4)
    r3 = record(coin_3, confirmed=3, spent=5)
    r4 = record(coin_4, confirmed=4, spent=6)
    r5 = record(coin_5, confirmed=5, spent=7)

    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        # An empty store has no first coin.
        assert await store.get_first_coin_height() is None

        # Insert from the highest confirmation height down; the reported
        # minimum must follow each insertion.
        for rec, lowest in ((r5, 5), (r4, 4), (r3, 3), (r2, 2), (r1, 1)):
            await store.add_coin_record(rec)
            assert await store.get_first_coin_height() == lowest
@pytest.mark.asyncio
async def test_rollback_to_block() -> None:
    """rollback_to_block un-spends coins spent above the height and removes
    coins confirmed above it.

    Idiom fix: `set([...])` replaced with `{...}`; repetitive adds and name
    lists folded into loops/comprehensions.
    """
    r1 = record(coin_1, confirmed=1, spent=0)
    r2 = record(coin_2, confirmed=2, spent=4)
    r3 = record(coin_3, confirmed=3, spent=5)
    r4 = record(coin_4, confirmed=4, spent=6)
    r5 = record(coin_5, confirmed=5, spent=7)

    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        for rec in (r1, r2, r3, r4, r5):
            await store.add_coin_record(rec)

        names = [c.name() for c in (coin_1, coin_2, coin_3, coin_4, coin_5)]
        assert set(await store.get_multiple_coin_records(names)) == {r1, r2, r3, r4, r5}
        assert await store.get_coin_record(coin_5.name()) == r5

        # r5 was spent at height 7 > 6: rolling back to 6 clears its spent state.
        await store.rollback_to_block(6)

        new_r5 = await store.get_coin_record(coin_5.name())
        assert not new_r5.spent
        assert new_r5.spent_block_height == 0
        assert new_r5 != r5

        assert await store.get_coin_record(coin_4.name()) == r4

        # r5 was confirmed at height 5 > 4: rolling back to 4 removes it
        # entirely, while r4 (spent at 6) is merely un-spent.
        await store.rollback_to_block(4)

        assert await store.get_coin_record(coin_5.name()) is None
        new_r4 = await store.get_coin_record(coin_4.name())
        assert not new_r4.spent
        assert new_r4.spent_block_height == 0
        assert new_r4 != r4
@pytest.mark.asyncio
async def test_count_small_unspent() -> None:
    """count_small_unspent counts unspent coins with amount strictly below the cutoff."""
    async with DBConnection(1) as db_wrapper:
        store = await WalletCoinStore.create(db_wrapper)
        # Fresh coins with amounts 1, 2 and 4 (renamed locals -- the original
        # shadowed the module-level coin_1/coin_2/coin_3).
        small_a = Coin(token_bytes(32), token_bytes(32), uint64(1))
        small_b = Coin(token_bytes(32), token_bytes(32), uint64(2))
        small_c = Coin(token_bytes(32), token_bytes(32), uint64(4))
        await store.add_coin_record(record(small_a, confirmed=1, spent=0))
        await store.add_coin_record(record(small_b, confirmed=2, spent=0))
        await store.add_coin_record(record(small_c, confirmed=3, spent=0))

        for cutoff, expected in ((5, 3), (4, 2), (3, 2), (2, 1), (1, 0)):
            assert await store.count_small_unspent(cutoff) == expected

        # Spending the amount-2 coin removes it from every count.
        await store.set_spent(small_b.name(), uint32(12))

        for cutoff, expected in ((5, 2), (4, 1), (3, 1), (2, 1), (1, 0)):
            assert await store.count_small_unspent(cutoff) == expected
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_singleton_lifecycle.py | tests/wallet/test_singleton_lifecycle.py | from typing import List, Tuple
import pytest
from blspy import G2Element
from clvm_tools import binutils
from flax.types.blockchain_format.program import Program, INFINITE_COST
from flax.types.announcement import Announcement
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_spend import CoinSpend
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.spend_bundle import SpendBundle
from flax.util.ints import uint64
from flax.wallet.puzzles.load_clvm import load_clvm
from tests.core.full_node.test_conditions import check_spend_bundle_validity, initial_blocks
# CLVM puzzles compiled once at import time and shared by all tests below.
SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
P2_SINGLETON_MOD = load_clvm("p2_singleton.clvm")
POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
# Pre-computed tree hashes used when currying / asserting puzzle hashes.
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
POOL_REWARD_PREFIX_MAINNET = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000")
def check_coin_spend(coin_spend: CoinSpend):
    """Best-effort sanity run of a coin spend's puzzle reveal.

    NOTE(review): any exception is swallowed and only printed, and the
    cost/result are discarded, so a failing puzzle never fails the caller --
    confirm this debugging-only behaviour is intentional.
    """
    try:
        cost, result = coin_spend.puzzle_reveal.run_with_cost(INFINITE_COST, coin_spend.solution)
    except Exception as ex:
        print(ex)
def adaptor_for_singleton_inner_puzzle(puzzle: Program) -> Program:
    """Wrap `puzzle` as a singleton inner puzzle: `(a (q . <puzzle>) 3)`.

    NOTE: round-tripping through disassemble/assemble is pretty slow.
    """
    # Idiom fix: f-string instead of %-formatting (identical output for str).
    return Program.to(binutils.assemble(f"(a (q . {binutils.disassemble(puzzle)}) 3)"))
def launcher_conditions_and_spend_bundle(
    parent_coin_id: bytes32,
    launcher_amount: uint64,
    initial_singleton_inner_puzzle: Program,
    metadata: List[Tuple[str, str]],
    launcher_puzzle: Program = LAUNCHER_PUZZLE,
) -> Tuple[Program, bytes32, List[Program], SpendBundle]:
    """Prepare a singleton launch from `parent_coin_id`.

    Returns (lineage_proof, launcher_id, conditions the parent spend must
    emit, spend bundle performing the launcher spend).
    """
    launcher_puzzle_hash = launcher_puzzle.get_tree_hash()
    launcher_coin = Coin(parent_coin_id, launcher_puzzle_hash, launcher_amount)
    singleton_full_puzzle = SINGLETON_MOD.curry(
        SINGLETON_MOD_HASH, launcher_coin.name(), launcher_puzzle_hash, initial_singleton_inner_puzzle
    )
    singleton_full_puzzle_hash = singleton_full_puzzle.get_tree_hash()
    # The launcher solution and the announced message are the same structure;
    # build it once instead of constructing two identical Programs.
    message_program = Program.to([singleton_full_puzzle_hash, launcher_amount, metadata])
    launcher_solution = message_program
    expected_announcement = Announcement(launcher_coin.name(), message_program.get_tree_hash())
    expected_conditions = [
        # The parent must assert the launcher's announcement ...
        Program.to(
            binutils.assemble(f"(0x{ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT.hex()} 0x{expected_announcement.name()})")
        ),
        # ... and create the launcher coin itself.
        Program.to(
            binutils.assemble(f"(0x{ConditionOpcode.CREATE_COIN.hex()} 0x{launcher_puzzle_hash} {launcher_amount})")
        ),
    ]
    coin_spend = CoinSpend(launcher_coin, launcher_puzzle, launcher_solution)
    spend_bundle = SpendBundle([coin_spend], G2Element())
    lineage_proof = Program.to([parent_coin_id, launcher_amount])
    return lineage_proof, launcher_coin.name(), expected_conditions, spend_bundle
def singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> Program:
    """Curry the full singleton outer puzzle for the given launcher and inner puzzle."""
    curry_args = (SINGLETON_MOD_HASH, launcher_id, launcher_puzzle_hash, inner_puzzle)
    return SINGLETON_MOD.curry(*curry_args)
def singleton_puzzle_hash(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> bytes32:
    """Tree hash of the fully curried singleton puzzle."""
    full_puzzle = singleton_puzzle(launcher_id, launcher_puzzle_hash, inner_puzzle)
    return full_puzzle.get_tree_hash()
def solution_for_singleton_puzzle(lineage_proof: Program, my_amount: int, inner_solution: Program) -> Program:
    """Outer solution layout: (lineage_proof my_amount inner_solution)."""
    outer_solution = [lineage_proof, my_amount, inner_solution]
    return Program.to(outer_solution)
def p2_singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32) -> Program:
    """Curry the pay-to-singleton puzzle for the given launcher."""
    curry_args = (SINGLETON_MOD_HASH, launcher_id, launcher_puzzle_hash)
    return P2_SINGLETON_MOD.curry(*curry_args)
def p2_singleton_puzzle_hash(launcher_id: Program, launcher_puzzle_hash: bytes32) -> bytes32:
    """Tree hash of the curried pay-to-singleton puzzle."""
    puzzle = p2_singleton_puzzle(launcher_id, launcher_puzzle_hash)
    return puzzle.get_tree_hash()
@pytest.mark.asyncio
async def test_only_odd_coins_0(bt):
    """Launch a singleton from a farmed coin and verify the created/removed coin sets.

    Fixes: dropped the trailing `return 0` (pytest warns on tests returning
    non-None) and replaced `set([...])` with set comprehensions.
    """
    blocks = await initial_blocks(bt)
    farmed_coin = list(blocks[-1].get_included_reward_coins())[0]

    metadata = [("foo", "bar")]
    ANYONE_CAN_SPEND_PUZZLE = Program.to(1)
    launcher_amount = uint64(1)
    launcher_puzzle = LAUNCHER_PUZZLE
    launcher_puzzle_hash = launcher_puzzle.get_tree_hash()
    initial_singleton_puzzle = adaptor_for_singleton_inner_puzzle(ANYONE_CAN_SPEND_PUZZLE)
    lineage_proof, launcher_id, condition_list, launcher_spend_bundle = launcher_conditions_and_spend_bundle(
        farmed_coin.name(), launcher_amount, initial_singleton_puzzle, metadata, launcher_puzzle
    )

    conditions = Program.to(condition_list)
    coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions)
    spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())])
    coins_added, coins_removed = await check_spend_bundle_validity(bt, blocks, spend_bundle)

    coin_set_added = {rec.coin for rec in coins_added}
    coin_set_removed = {rec.coin for rec in coins_removed}

    # The launcher coin is created by the farmed-coin spend and immediately
    # spent by the launcher spend within the same bundle.
    launcher_coin = launcher_spend_bundle.coin_spends[0].coin
    assert launcher_coin in coin_set_added
    assert launcher_coin in coin_set_removed
    assert farmed_coin in coin_set_removed

    singleton_expected_puzzle_hash = singleton_puzzle_hash(launcher_id, launcher_puzzle_hash, initial_singleton_puzzle)
    expected_singleton_coin = Coin(launcher_coin.name(), singleton_expected_puzzle_hash, launcher_amount)
    assert expected_singleton_coin in coin_set_added

    # TODO (carried over from the original):
    # next up: spend the expected_singleton_coin
    # it's an adapted `ANYONE_CAN_SPEND_PUZZLE`
    # then try a bad lineage proof
    # then try writing two odd coins
    # then try writing zero odd coins
    # then, destroy the singleton with the -113 hack
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/test_offer_parsing_performance.py | tests/wallet/test_offer_parsing_performance.py | from __future__ import annotations
import cProfile
from contextlib import contextmanager
from typing import Iterator
import pytest
from flax.wallet.trading.offer import Offer
from tests.util.misc import assert_runtime
# Flip to True to capture cProfile output for the benchmarks below.
with_profile = False
# Render the dumped stats with gprof2dot, e.g.:
# gprof2dot -f pstats offer-parsing.profile >p.dot && dot -Tpng p.dot >offer-parsing.png
# gprof2dot -f pstats offered-coins.profile >c.dot && dot -Tpng c.dot >offered-coins.png
@contextmanager
def enable_profiler(name: str) -> Iterator[None]:
    """Profile the enclosed block into `<name>.profile` when `with_profile` is set.

    No-op (plain yield) when profiling is disabled.
    """
    if not with_profile:
        yield
        return

    profiler = cProfile.Profile()
    with profiler:
        yield
        profiler.create_stats()
        profiler.dump_stats(f"{name}.profile")
@pytest.mark.benchmark
def test_offer_parsing_performance() -> None:
    """Deserialize the reference offer 100 times within a 2 second budget."""
    raw = bytes.fromhex(test_offer)
    with assert_runtime(seconds=2, label="Offer.from_bytes()"):
        with enable_profiler("offer-parsing"):
            for _ in range(100):
                parsed = Offer.from_bytes(raw)
                assert parsed is not None
@pytest.mark.benchmark
def test_offered_coins_performance() -> None:
    """Compute the reference offer's offered coins 100 times within budget."""
    parsed_offer = Offer.from_bytes(bytes.fromhex(test_offer))
    # NOTE(review): label below is copied from the parsing benchmark;
    # presumably it should mention get_offered_coins -- kept byte-identical.
    with assert_runtime(seconds=2.5, label="Offer.from_bytes()"):
        with enable_profiler("offered-coins"):
            for _ in range(100):
                offered = parsed_offer.get_offered_coins()
                assert len(offered.items()) > 0
test_offer = str(
"0000000200000000000000000000000000000000000000000000000000000000"
"00000000bae24162efbd568f89bc7a340798a6118df0189eb9e3f8697bcea27a"
"f99f8f790000000000000000ff02ffff01ff02ff0affff04ff02ffff04ff03ff"
"80808080ffff04ffff01ffff333effff02ffff03ff05ffff01ff04ffff04ff0c"
"ffff04ffff02ff1effff04ff02ffff04ff09ff80808080ff808080ffff02ff16"
"ffff04ff02ffff04ff19ffff04ffff02ff0affff04ff02ffff04ff0dff808080"
"80ff808080808080ff8080ff0180ffff02ffff03ff05ffff01ff04ffff04ff08"
"ff0980ffff02ff16ffff04ff02ffff04ff0dffff04ff0bff808080808080ffff"
"010b80ff0180ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff1eff"
"ff04ff02ffff04ff09ff80808080ffff02ff1effff04ff02ffff04ff0dff8080"
"808080ffff01ff0bffff0101ff058080ff0180ff018080ffffa0113e4b68cb75"
"5a6e4347f4d93e3d942ad1d89aadef6536dad229fe5fbe6ab232ffffa0e13f56"
"72075e5ac9a50bcf080fca54762b2a59e22f37951f56802603ec2fe6e1ff64ff"
"80808080ee2b6845e1c317976b002adc4d1dc48d2b752b9f47a3c1ecad4df36a"
"2905d5add1ee4d5798f94b10f9487e314a9561a7a757d2fcf29d8d461106f04b"
"8e303b790000000000000001ff02ffff01ff02ffff01ff02ffff03ffff18ff2f"
"ff3480ffff01ff04ffff04ff20ffff04ff2fff808080ffff04ffff02ff3effff"
"04ff02ffff04ff05ffff04ffff02ff2affff04ff02ffff04ff27ffff04ffff02"
"ffff03ff77ffff01ff02ff36ffff04ff02ffff04ff09ffff04ff57ffff04ffff"
"02ff2effff04ff02ffff04ff05ff80808080ff808080808080ffff011d80ff01"
"80ffff04ffff02ffff03ff77ffff0181b7ffff015780ff0180ff808080808080"
"ffff04ff77ff808080808080ffff02ff3affff04ff02ffff04ff05ffff04ffff"
"02ff0bff5f80ffff01ff8080808080808080ffff01ff088080ff0180ffff04ff"
"ff01ffffffff4947ff0233ffff0401ff0102ffffff20ff02ffff03ff05ffff01"
"ff02ff32ffff04ff02ffff04ff0dffff04ffff0bff3cffff0bff34ff2480ffff"
"0bff3cffff0bff3cffff0bff34ff2c80ff0980ffff0bff3cff0bffff0bff34ff"
"8080808080ff8080808080ffff010b80ff0180ffff02ffff03ffff22ffff09ff"
"ff0dff0580ff2280ffff09ffff0dff0b80ff2280ffff15ff17ffff0181ff8080"
"ffff01ff0bff05ff0bff1780ffff01ff088080ff0180ff02ffff03ff0bffff01"
"ff02ffff03ffff02ff26ffff04ff02ffff04ff13ff80808080ffff01ff02ffff"
"03ffff20ff1780ffff01ff02ffff03ffff09ff81b3ffff01818f80ffff01ff02"
"ff3affff04ff02ffff04ff05ffff04ff1bffff04ff34ff808080808080ffff01"
"ff04ffff04ff23ffff04ffff02ff36ffff04ff02ffff04ff09ffff04ff53ffff"
"04ffff02ff2effff04ff02ffff04ff05ff80808080ff808080808080ff738080"
"ffff02ff3affff04ff02ffff04ff05ffff04ff1bffff04ff34ff808080808080"
"8080ff0180ffff01ff088080ff0180ffff01ff04ff13ffff02ff3affff04ff02"
"ffff04ff05ffff04ff1bffff04ff17ff8080808080808080ff0180ffff01ff02"
"ffff03ff17ff80ffff01ff088080ff018080ff0180ffffff02ffff03ffff09ff"
"09ff3880ffff01ff02ffff03ffff18ff2dffff010180ffff01ff0101ff8080ff"
"0180ff8080ff0180ff0bff3cffff0bff34ff2880ffff0bff3cffff0bff3cffff"
"0bff34ff2c80ff0580ffff0bff3cffff02ff32ffff04ff02ffff04ff07ffff04"
"ffff0bff34ff3480ff8080808080ffff0bff34ff8080808080ffff02ffff03ff"
"ff07ff0580ffff01ff0bffff0102ffff02ff2effff04ff02ffff04ff09ff8080"
"8080ffff02ff2effff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101"
"ff058080ff0180ff02ffff03ffff21ff17ffff09ff0bff158080ffff01ff04ff"
"30ffff04ff0bff808080ffff01ff088080ff0180ff018080ffff04ffff01ffa0"
"7faa3253bfddd1e0decb0906b2dc6247bbc4cf608f58345d173adb63e8b47c9f"
"ffa02268aba6ee7b6a26b6f8abc2c00938e413a8aa128d1ba1bdc4a9bfb84e62"
"aa2da0eff07522495060c066f66f32acc2a77e3a3e737aca8baea4d1a64ea4cd"
"c13da9ffff04ffff01ff02ffff01ff02ffff01ff02ff3effff04ff02ffff04ff"
"05ffff04ffff02ff2fff5f80ffff04ff80ffff04ffff04ffff04ff0bffff04ff"
"17ff808080ffff01ff808080ffff01ff8080808080808080ffff04ffff01ffff"
"ff0233ff04ff0101ffff02ff02ffff03ff05ffff01ff02ff1affff04ff02ffff"
"04ff0dffff04ffff0bff12ffff0bff2cff1480ffff0bff12ffff0bff12ffff0b"
"ff2cff3c80ff0980ffff0bff12ff0bffff0bff2cff8080808080ff8080808080"
"ffff010b80ff0180ffff0bff12ffff0bff2cff1080ffff0bff12ffff0bff12ff"
"ff0bff2cff3c80ff0580ffff0bff12ffff02ff1affff04ff02ffff04ff07ffff"
"04ffff0bff2cff2c80ff8080808080ffff0bff2cff8080808080ffff02ffff03"
"ffff07ff0580ffff01ff0bffff0102ffff02ff2effff04ff02ffff04ff09ff80"
"808080ffff02ff2effff04ff02ffff04ff0dff8080808080ffff01ff0bffff01"
"01ff058080ff0180ff02ffff03ff0bffff01ff02ffff03ffff09ff23ff1880ff"
"ff01ff02ffff03ffff18ff81b3ff2c80ffff01ff02ffff03ffff20ff1780ffff"
"01ff02ff3effff04ff02ffff04ff05ffff04ff1bffff04ff33ffff04ff2fffff"
"04ff5fff8080808080808080ffff01ff088080ff0180ffff01ff04ff13ffff02"
"ff3effff04ff02ffff04ff05ffff04ff1bffff04ff17ffff04ff2fffff04ff5f"
"ff80808080808080808080ff0180ffff01ff02ffff03ffff09ff23ffff0181e8"
"80ffff01ff02ff3effff04ff02ffff04ff05ffff04ff1bffff04ff17ffff04ff"
"ff02ffff03ffff22ffff09ffff02ff2effff04ff02ffff04ff53ff80808080ff"
"82014f80ffff20ff5f8080ffff01ff02ff53ffff04ff818fffff04ff82014fff"
"ff04ff81b3ff8080808080ffff01ff088080ff0180ffff04ff2cff8080808080"
"808080ffff01ff04ff13ffff02ff3effff04ff02ffff04ff05ffff04ff1bffff"
"04ff17ffff04ff2fffff04ff5fff80808080808080808080ff018080ff0180ff"
"ff01ff04ffff04ff18ffff04ffff02ff16ffff04ff02ffff04ff05ffff04ff27"
"ffff04ffff0bff2cff82014f80ffff04ffff02ff2effff04ff02ffff04ff818f"
"ff80808080ffff04ffff0bff2cff0580ff8080808080808080ff378080ff81af"
"8080ff0180ff018080ffff04ffff01a0a04d9f57764f54a43e4030befb4d8002"
"6e870519aaa66334aef8304f5d0393c2ffff04ffff01ffff75ff9d6874747073"
"3a2f2f70696373756d2e70686f746f732f3337372f38313180ffff68a0452062"
"a44018653e22198e70a0e756641361b8ec3bc466c1924a38d372e1a945ffff82"
"6d75ff9668747470733a2f2f7777772e6d656a69612e636f6d2f80ffff826c75"
"ff93687474703a2f2f616775697272652e6e65742f80ffff82736e01ffff8273"
"7401ffff826d68a01f462ea72e639eca6ebe792caeb296491177454fe2c763cb"
"9b08e52e85c02712ffff826c68a0b794d0dfa36ac60ff17b0b3649adbc44a703"
"8713bf8acfeaf4bb57dd276dd7ec80ffff04ffff01a0fe8a4b4e27a2e29a4d3f"
"c7ce9d527adbcaccbab6ada3903ccf3ba9a769d2d78bffff04ffff01ff02ffff"
"01ff02ffff01ff02ff26ffff04ff02ffff04ff05ffff04ff17ffff04ff0bffff"
"04ffff02ff2fff5f80ff80808080808080ffff04ffff01ffffff82ad4cff0233"
"ffff3e04ff81f601ffffff0102ffff02ffff03ff05ffff01ff02ff2affff04ff"
"02ffff04ff0dffff04ffff0bff32ffff0bff3cff3480ffff0bff32ffff0bff32"
"ffff0bff3cff2280ff0980ffff0bff32ff0bffff0bff3cff8080808080ff8080"
"808080ffff010b80ff0180ff04ffff04ff38ffff04ffff02ff36ffff04ff02ff"
"ff04ff05ffff04ff27ffff04ffff02ff2effff04ff02ffff04ffff02ffff03ff"
"81afffff0181afffff010b80ff0180ff80808080ffff04ffff0bff3cff4f80ff"
"ff04ffff0bff3cff0580ff8080808080808080ff378080ff82016f80ffffff02"
"ff3effff04ff02ffff04ff05ffff04ff0bffff04ff17ffff04ff2fffff04ff2f"
"ffff01ff80ff808080808080808080ff0bff32ffff0bff3cff2880ffff0bff32"
"ffff0bff32ffff0bff3cff2280ff0580ffff0bff32ffff02ff2affff04ff02ff"
"ff04ff07ffff04ffff0bff3cff3c80ff8080808080ffff0bff3cff8080808080"
"ffff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff2effff04ff02ff"
"ff04ff09ff80808080ffff02ff2effff04ff02ffff04ff0dff8080808080ffff"
"01ff0bffff0101ff058080ff0180ff02ffff03ff5fffff01ff02ffff03ffff09"
"ff82011fff3880ffff01ff02ffff03ffff09ffff18ff82059f80ff3c80ffff01"
"ff02ffff03ffff20ff81bf80ffff01ff02ff3effff04ff02ffff04ff05ffff04"
"ff0bffff04ff17ffff04ff2fffff04ff81dfffff04ff82019fffff04ff82017f"
"ff80808080808080808080ffff01ff088080ff0180ffff01ff04ff819fffff02"
"ff3effff04ff02ffff04ff05ffff04ff0bffff04ff17ffff04ff2fffff04ff81"
"dfffff04ff81bfffff04ff82017fff808080808080808080808080ff0180ffff"
"01ff02ffff03ffff09ff82011fff2c80ffff01ff02ffff03ffff20ff82017f80"
"ffff01ff04ffff04ff24ffff04ffff0eff10ffff02ff2effff04ff02ffff04ff"
"82019fff8080808080ff808080ffff02ff3effff04ff02ffff04ff05ffff04ff"
"0bffff04ff17ffff04ff2fffff04ff81dfffff04ff81bfffff04ffff02ff0bff"
"ff04ff17ffff04ff2fffff04ff82019fff8080808080ff808080808080808080"
"8080ffff01ff088080ff0180ffff01ff02ffff03ffff09ff82011fff2480ffff"
"01ff02ffff03ffff20ffff02ffff03ffff09ffff0122ffff0dff82029f8080ff"
"ff01ff02ffff03ffff09ffff0cff82029fff80ffff010280ff1080ffff01ff01"
"01ff8080ff0180ff8080ff018080ffff01ff04ff819fffff02ff3effff04ff02"
"ffff04ff05ffff04ff0bffff04ff17ffff04ff2fffff04ff81dfffff04ff81bf"
"ffff04ff82017fff8080808080808080808080ffff01ff088080ff0180ffff01"
"ff04ff819fffff02ff3effff04ff02ffff04ff05ffff04ff0bffff04ff17ffff"
"04ff2fffff04ff81dfffff04ff81bfffff04ff82017fff808080808080808080"
"808080ff018080ff018080ff0180ffff01ff02ff3affff04ff02ffff04ff05ff"
"ff04ff0bffff04ff81bfffff04ffff02ffff03ff82017fffff0182017fffff01"
"ff02ff0bffff04ff17ffff04ff2fffff01ff808080808080ff0180ff80808080"
"80808080ff0180ff018080ffff04ffff01a0c5abea79afaa001b5427dfa0c8cf"
"42ca6f38f5841b78f9b3c252733eb2de2726ffff04ffff0180ffff04ffff01ff"
"02ffff01ff02ffff01ff02ffff03ff81bfffff01ff04ff82013fffff04ff80ff"
"ff04ffff02ffff03ffff22ff82013fffff20ffff09ff82013fff2f808080ffff"
"01ff04ffff04ff10ffff04ffff0bffff02ff2effff04ff02ffff04ff09ffff04"
"ff8205bfffff04ffff02ff3effff04ff02ffff04ffff04ff09ffff04ff82013f"
"ff1d8080ff80808080ff808080808080ff1580ff808080ffff02ff16ffff04ff"
"02ffff04ff0bffff04ff17ffff04ff8202bfffff04ff15ff8080808080808080"
"ffff01ff02ff16ffff04ff02ffff04ff0bffff04ff17ffff04ff8202bfffff04"
"ff15ff8080808080808080ff0180ff80808080ffff01ff04ff2fffff01ff80ff"
"80808080ff0180ffff04ffff01ffffff3f02ff04ff0101ffff822710ff02ff02"
"ffff03ff05ffff01ff02ff3affff04ff02ffff04ff0dffff04ffff0bff2affff"
"0bff2cff1480ffff0bff2affff0bff2affff0bff2cff3c80ff0980ffff0bff2a"
"ff0bffff0bff2cff8080808080ff8080808080ffff010b80ff0180ffff02ffff"
"03ff17ffff01ff04ffff04ff10ffff04ffff0bff81a7ffff02ff3effff04ff02"
"ffff04ffff04ff2fffff04ffff04ff05ffff04ffff05ffff14ffff12ff47ff0b"
"80ff128080ffff04ffff04ff05ff8080ff80808080ff808080ff8080808080ff"
"808080ffff02ff16ffff04ff02ffff04ff05ffff04ff0bffff04ff37ffff04ff"
"2fff8080808080808080ff8080ff0180ffff0bff2affff0bff2cff1880ffff0b"
"ff2affff0bff2affff0bff2cff3c80ff0580ffff0bff2affff02ff3affff04ff"
"02ffff04ff07ffff04ffff0bff2cff2c80ff8080808080ffff0bff2cff808080"
"8080ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff3effff04ff02"
"ffff04ff09ff80808080ffff02ff3effff04ff02ffff04ff0dff8080808080ff"
"ff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01ffa07faa3253bf"
"ddd1e0decb0906b2dc6247bbc4cf608f58345d173adb63e8b47c9fffa02268ab"
"a6ee7b6a26b6f8abc2c00938e413a8aa128d1ba1bdc4a9bfb84e62aa2da0eff0"
"7522495060c066f66f32acc2a77e3a3e737aca8baea4d1a64ea4cdc13da9ffff"
"04ffff01a003d5d19244dfe1fffc3de5f9e1ded13bd5fb47340e798c9d042d7c"
"d9a101ca09ffff04ffff0182012cff0180808080ffff04ffff01ff02ffff01ff"
"02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1e"
"ffff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01"
"ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05"
"ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff"
"17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff"
"0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff"
"04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff01"
"8080ffff04ffff01b0b4652a5c069d8498cd4b0dd1bd5198176078b466264651"
"7d51e28344b2e714d8cefb781e4dbb5d4cc7e0cba7b4b50d77ff018080ff0180"
"80808080ff018080808080ff01808080ffffa02268aba6ee7b6a26b6f8abc2c0"
"0938e413a8aa128d1ba1bdc4a9bfb84e62aa2dffa012b92f169ae6991c481b3f"
"43e17b821cd8aec6bb1614bbe9042e6f9c734979aeff0180ff01ffffffff80ff"
"ff01ffff81f6ff80ffffff64ffa0bae24162efbd568f89bc7a340798a6118df0"
"189eb9e3f8697bcea27af99f8f798080ff8080ffff33ffa0bae24162efbd568f"
"89bc7a340798a6118df0189eb9e3f8697bcea27af99f8f79ff01ffffa0bae241"
"62efbd568f89bc7a340798a6118df0189eb9e3f8697bcea27af99f8f798080ff"
"ff3fffa01791f8e6d86d66bca42867c0be163909c07c46dfb3bb6660f1fe8b6b"
"0cb952e48080ff808080808096a0c4136217c2c2cc4eb525ba7aa14d166d0353"
"9e5d1ce733a28592fc4adf52a92f873e96ac8a3dfc02964f102dca750768cade"
"7acbf0055da31d080b9894768971906509062e2255634f14e4e6f7acd68b7c40"
"d1526e5ca0b489b7afd60762",
)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/simple_sync/config.py | tests/wallet/simple_sync/config.py | from __future__ import annotations
checkout_blocks_and_plots = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/simple_sync/__init__.py | tests/wallet/simple_sync/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/simple_sync/test_simple_sync_protocol.py | tests/wallet/simple_sync/test_simple_sync_protocol.py | # flake8: noqa: F811, F401
import asyncio
from typing import List, Optional
import pytest
from clvm.casts import int_to_bytes
from colorlog import getLogger
from flax.consensus.block_rewards import calculate_pool_reward, calculate_base_farmer_reward
from flax.protocols import wallet_protocol
from flax.protocols.full_node_protocol import RespondTransaction
from flax.protocols.protocol_message_types import ProtocolMessageTypes
from flax.protocols.wallet_protocol import RespondToCoinUpdates, CoinStateUpdate, RespondToPhUpdates
from flax.server.outbound_message import NodeType
from flax.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
from flax.types.blockchain_format.coin import Coin
from flax.types.coin_record import CoinRecord
from flax.types.condition_opcodes import ConditionOpcode
from flax.types.condition_with_args import ConditionWithArgs
from flax.types.peer_info import PeerInfo
from flax.types.spend_bundle import SpendBundle
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.wallet import Wallet
from flax.wallet.wallet_state_manager import WalletStateManager
from tests.util.wallet_is_synced import wallet_is_synced
from tests.connection_utils import add_dummy_connection
from flax.simulator.time_out_assert import time_out_assert
from tests.wallet.cat_wallet.test_cat_wallet import tx_in_pool
from flax.simulator.wallet_tools import WalletTool
def wallet_height_at_least(wallet_node, h):
height = wallet_node.wallet_state_manager.blockchain._peak_height
if height == h:
return True
return False
log = getLogger(__name__)
async def get_all_messages_in_queue(queue):
all_messages = []
await asyncio.sleep(2)
while not queue.empty():
message, peer = await queue.get()
all_messages.append(message)
return all_messages
class TestSimpleSyncProtocol:
@pytest.mark.asyncio
async def test_subscribe_for_ph(self, wallet_node_simulator, self_hostname):
num_blocks = 4
full_nodes, wallets, _ = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
zero_ph = 32 * b"\0"
junk_ph = 32 * b"\a"
fake_wallet_peer = fn_server.all_connections[peer_id]
msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert data_response.coin_states == []
# Farm few more with reward
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 2 * num_blocks # 2 per height farmer / pool reward
# Farm more rewards to check the incoming queue for the updates
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
all_messages = await get_all_messages_in_queue(incoming_queue)
zero_coin = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [zero_ph])
all_zero_coin = set(zero_coin)
notified_zero_coins = set()
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
notified_zero_coins.add(coin_state)
assert len(data_response.items) == 2 # 2 per height farmer / pool reward
assert all_zero_coin == notified_zero_coins
# Test subscribing to more coins
one_ph = 32 * b"\1"
msg = wallet_protocol.RegisterForPhUpdates([one_ph], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
peak = full_node_api.full_node.blockchain.get_peak()
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
zero_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(
True, [zero_ph], peak.height + 1
)
one_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [one_ph])
all_coins = set(zero_coins)
all_coins.update(one_coins)
all_messages = await get_all_messages_in_queue(incoming_queue)
notified_all_coins = set()
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
notified_all_coins.add(coin_state)
assert len(data_response.items) == 2 # 2 per height farmer / pool reward
assert all_coins == notified_all_coins
wsm: WalletStateManager = wallet_node.wallet_state_manager
wallet: Wallet = wsm.wallets[1]
puzzle_hash = await wallet.get_new_puzzlehash()
for i in range(0, num_blocks):
if i == num_blocks - 1:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
else:
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
funds = sum(
[
calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
for i in range(1, num_blocks + 1)
]
)
fn_amount = sum(
cr.coin.amount
for cr in await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(False, puzzle_hash)
)
await time_out_assert(20, wallet.get_confirmed_balance, funds)
assert funds == fn_amount
msg_1 = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
msg_response_1 = await full_node_api.register_interest_in_puzzle_hash(msg_1, fake_wallet_peer)
assert msg_response_1.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response_1: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response_1.data)
assert len(data_response_1.coin_states) == 2 * num_blocks # 2 per height farmer / pool reward
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
tx_record = await wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
assert len(tx_record.spend_bundle.removals()) == 1
spent_coin = tx_record.spend_bundle.removals()[0]
assert spent_coin.puzzle_hash == puzzle_hash
await wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
# Let's make sure the wallet can handle a non ephemeral launcher
from flax.wallet.puzzles.singleton_top_layer import SINGLETON_LAUNCHER_HASH
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
tx_record = await wallet.generate_signed_transaction(uint64(10), SINGLETON_LAUNCHER_HASH, uint64(0))
await wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(SINGLETON_LAUNCHER_HASH))
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
# Send a transaction to make sure the wallet is still running
tx_record = await wallet.generate_signed_transaction(uint64(10), junk_ph, uint64(0))
await wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
all_messages = await get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
if coin_state.coin.name() == spent_coin.name():
notified_state = coin_state
assert notified_state is not None
assert notified_state.coin == spent_coin
assert notified_state.spent_height is not None
@pytest.mark.asyncio
async def test_subscribe_for_coin_id(self, wallet_node_simulator, self_hostname):
num_blocks = 4
full_nodes, wallets, _ = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
standard_wallet: Wallet = wsm.wallets[1]
puzzle_hash = await standard_wallet.get_new_puzzlehash()
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
fake_wallet_peer = fn_server.all_connections[peer_id]
# Farm to create a coin that we'll track
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(20, standard_wallet.get_confirmed_balance, funds)
my_coins: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
True, puzzle_hash
)
coin_to_spend = my_coins[0].coin
msg = wallet_protocol.RegisterForCoinUpdates([coin_to_spend.name()], 0)
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
assert msg_response is not None
assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert data_response.coin_states[0].coin == coin_to_spend
coins = set()
coins.add(coin_to_spend)
tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0), coins=coins)
await standard_wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
# Farm transaction
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
all_messages = await get_all_messages_in_queue(incoming_queue)
notified_coins = set()
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
notified_coins.add(coin_state.coin)
assert coin_state.spent_height is not None
assert notified_coins == coins
# Test getting notification for coin that is about to be created
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
tx_record.spend_bundle.additions()
added_target: Optional[Coin] = None
for coin in tx_record.spend_bundle.additions():
if coin.puzzle_hash == puzzle_hash:
added_target = coin
assert added_target is not None
msg = wallet_protocol.RegisterForCoinUpdates([added_target.name()], 0)
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
assert msg_response is not None
assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
await standard_wallet.push_transaction(tx_record)
await time_out_assert(
15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
all_messages = await get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
for coin_state in data_response.items:
if coin_state.coin.name() == added_target.name():
notified_state = coin_state
assert notified_state is not None
assert notified_state.coin == added_target
assert notified_state.spent_height is None
@pytest.mark.asyncio
async def test_subscribe_for_ph_reorg(self, wallet_node_simulator, self_hostname):
num_blocks = 4
long_blocks = 20
full_nodes, wallets, _ = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
standard_wallet: Wallet = wsm.wallets[1]
puzzle_hash = await standard_wallet.get_new_puzzlehash()
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
fake_wallet_peer = fn_server.all_connections[peer_id]
zero_ph = 32 * b"\0"
# Farm to create a coin that we'll track
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
for i in range(0, long_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
msg = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response is not None
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
expected_height = uint32(long_blocks + 2 * num_blocks + 1)
await time_out_assert(20, full_node_api.full_node.blockchain.get_peak_height, expected_height)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert len(coin_records) > 0
fork_height = expected_height - num_blocks - 5
req = ReorgProtocol(fork_height, expected_height + 5, zero_ph, None)
await full_node_api.reorg_from_index_to_new_index(req)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert coin_records == []
all_messages = await get_all_messages_in_queue(incoming_queue)
coin_update_messages = []
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
coin_update_messages.append(data_response)
# First state is creation, second one is a reorg
assert len(coin_update_messages) == 2
first = coin_update_messages[0]
assert len(first.items) == 2
first_state_coin_1 = first.items[0]
assert first_state_coin_1.spent_height is None
assert first_state_coin_1.created_height is not None
first_state_coin_2 = first.items[1]
assert first_state_coin_2.spent_height is None
assert first_state_coin_2.created_height is not None
second = coin_update_messages[1]
assert second.fork_height == fork_height
assert len(second.items) == 2
second_state_coin_1 = second.items[0]
assert second_state_coin_1.spent_height is None
assert second_state_coin_1.created_height is None
second_state_coin_2 = second.items[1]
assert second_state_coin_2.spent_height is None
assert second_state_coin_2.created_height is None
@pytest.mark.asyncio
async def test_subscribe_for_coin_id_reorg(self, wallet_node_simulator, self_hostname):
num_blocks = 4
long_blocks = 20
full_nodes, wallets, _ = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
standard_wallet: Wallet = wsm.wallets[1]
puzzle_hash = await standard_wallet.get_new_puzzlehash()
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
fake_wallet_peer = fn_server.all_connections[peer_id]
zero_ph = 32 * b"\0"
# Farm to create a coin that we'll track
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
for i in range(0, long_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
expected_height = uint32(long_blocks + 2 * num_blocks + 1)
await time_out_assert(20, full_node_api.full_node.blockchain.get_peak_height, expected_height)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert len(coin_records) > 0
for coin_rec in coin_records:
msg = wallet_protocol.RegisterForCoinUpdates([coin_rec.name], 0)
msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
assert msg_response is not None
fork_height = expected_height - num_blocks - 5
req = ReorgProtocol(fork_height, expected_height + 5, zero_ph, None)
await full_node_api.reorg_from_index_to_new_index(req)
coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
assert coin_records == []
all_messages = await get_all_messages_in_queue(incoming_queue)
coin_update_messages = []
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
coin_update_messages.append(data_response)
assert len(coin_update_messages) == 1
update = coin_update_messages[0]
coin_states = update.items
assert len(coin_states) == 2
first_coin = coin_states[0]
assert first_coin.spent_height is None
assert first_coin.created_height is None
second_coin = coin_states[1]
assert second_coin.spent_height is None
assert second_coin.created_height is None
@pytest.mark.asyncio
async def test_subscribe_for_hint(self, wallet_node_simulator, self_hostname):
num_blocks = 4
full_nodes, wallets, bt = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
wt: WalletTool = bt.get_pool_wallet_tool()
ph = wt.get_new_puzzlehash()
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await asyncio.sleep(6)
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
coin_spent = coins[0].coin
hint_puzzle_hash = 32 * b"\2"
amount = 1
amount_bin = int_to_bytes(1)
hint = 32 * b"\5"
fake_wallet_peer = fn_server.all_connections[peer_id]
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
condition_dict = {
ConditionOpcode.CREATE_COIN: [
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
]
}
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
tx: SpendBundle = wt.generate_signed_transaction(
10,
wt.get_new_puzzlehash(),
coin_spent,
condition_dic=condition_dict,
)
await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
await time_out_assert(20, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
all_messages = await get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
notified_state = data_response
break
assert notified_state is not None
assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 1
coin_records: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
True, hint_puzzle_hash
)
assert len(coin_records) == 1
assert data_response.coin_states[0] == coin_records[0].coin_state
@pytest.mark.asyncio
async def test_subscribe_for_hint_long_sync(self, wallet_two_node_simulator, self_hostname):
num_blocks = 4
full_nodes, wallets, bt = wallet_two_node_simulator
full_node_api = full_nodes[0]
full_node_api_1 = full_nodes[1]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
fn_server_1 = full_node_api_1.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
incoming_queue_1, peer_id_1 = await add_dummy_connection(fn_server_1, self_hostname, 12313, NodeType.WALLET)
wt: WalletTool = bt.get_pool_wallet_tool()
ph = wt.get_new_puzzlehash()
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await asyncio.sleep(6)
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
coin_spent = coins[0].coin
hint_puzzle_hash = 32 * b"\2"
amount = 1
amount_bin = int_to_bytes(1)
hint = 32 * b"\5"
fake_wallet_peer = fn_server.all_connections[peer_id]
fake_wallet_peer_1 = fn_server_1.all_connections[peer_id_1]
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
msg_response_1 = await full_node_api_1.register_interest_in_puzzle_hash(msg, fake_wallet_peer_1)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
condition_dict = {
ConditionOpcode.CREATE_COIN: [
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
]
}
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
tx: SpendBundle = wt.generate_signed_transaction(
10,
wt.get_new_puzzlehash(),
coin_spent,
condition_dic=condition_dict,
)
await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
await time_out_assert(20, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
# Create more blocks than recent "short_sync_blocks_behind_threshold" so that node enters batch
for i in range(0, 100):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
node1_height = full_node_api_1.full_node.blockchain.get_peak_height()
assert node1_height is None
await fn_server_1.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
node0_height = full_node_api.full_node.blockchain.get_peak_height()
await time_out_assert(60, full_node_api_1.full_node.blockchain.get_peak_height, node0_height)
all_messages = await get_all_messages_in_queue(incoming_queue)
all_messages_1 = await get_all_messages_in_queue(incoming_queue_1)
def check_messages_for_hint(messages):
notified_state = None
for message in messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
notified_state = data_response
break
assert notified_state is not None
assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
check_messages_for_hint(all_messages)
check_messages_for_hint(all_messages_1)
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/rpc/test_dl_wallet_rpc.py | tests/wallet/rpc/test_dl_wallet_rpc.py | from __future__ import annotations
import asyncio
import logging
import pytest
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.data_layer.data_layer_wallet import Mirror, SingletonRecord
from flax.rpc.wallet_rpc_client import WalletRpcClient
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.simulator.time_out_assert import time_out_assert
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.db_wallet.db_wallet_puzzles import create_mirror_puzzle
from tests.setup_nodes import SimulatorsAndWalletsServices
from tests.util.rpc import validate_get_routes
log = logging.getLogger(__name__)
class TestWalletRpc:
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_wallet_make_transaction(
self, two_wallet_nodes_services: SimulatorsAndWalletsServices, trusted: bool, self_hostname: str
) -> None:
num_blocks = 5
[full_node_service], wallet_services, bt = two_wallet_nodes_services
full_node_api = full_node_service._api
full_node_server = full_node_api.full_node.server
wallet_node = wallet_services[0]._node
server_2 = wallet_node.server
wallet_node_2 = wallet_services[1]._node
server_3 = wallet_node_2.server
wallet = wallet_node.wallet_state_manager.main_wallet
ph = await wallet.get_new_puzzlehash()
if trusted:
wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
wallet_node_2.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await server_2.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await server_3.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
initial_funds = sum(
[calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
)
await time_out_assert(15, wallet.get_confirmed_balance, initial_funds)
await time_out_assert(15, wallet.get_unconfirmed_balance, initial_funds)
assert wallet_services[0].rpc_server is not None
assert wallet_services[1].rpc_server is not None
client = await WalletRpcClient.create(
self_hostname,
wallet_services[0].rpc_server.listen_port,
wallet_services[0].root_path,
wallet_services[0].config,
)
await validate_get_routes(client, wallet_services[0].rpc_server.rpc_api)
client_2 = await WalletRpcClient.create(
self_hostname,
wallet_services[1].rpc_server.listen_port,
wallet_services[1].root_path,
wallet_services[1].config,
)
await validate_get_routes(client_2, wallet_services[1].rpc_server.rpc_api)
try:
merkle_root: bytes32 = bytes32([0] * 32)
txs, launcher_id = await client.create_new_dl(merkle_root, uint64(50))
for i in range(0, 5):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
await asyncio.sleep(0.5)
async def is_singleton_confirmed(rpc_client: WalletRpcClient, lid: bytes32) -> bool:
rec = await rpc_client.dl_latest_singleton(lid)
if rec is None:
return False
return rec.confirmed
await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id)
singleton_record: SingletonRecord = await client.dl_latest_singleton(launcher_id)
assert singleton_record.root == merkle_root
new_root: bytes32 = bytes32([1] * 32)
await client.dl_update_root(launcher_id, new_root, uint64(100))
for i in range(0, 5):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
await asyncio.sleep(0.5)
new_singleton_record: SingletonRecord = await client.dl_latest_singleton(launcher_id)
assert new_singleton_record.root == new_root
assert new_singleton_record.confirmed
assert await client.dl_history(launcher_id) == [new_singleton_record, singleton_record]
await client_2.dl_track_new(launcher_id)
async def is_singleton_generation(rpc_client: WalletRpcClient, lid: bytes32, generation: int) -> bool:
if await is_singleton_confirmed(rpc_client, lid):
rec = await rpc_client.dl_latest_singleton(lid)
if rec is None:
raise Exception("No latest singleton for: {lid!r}")
return rec.generation == generation
else:
return False
await time_out_assert(15, is_singleton_generation, True, client_2, launcher_id, 1)
assert await client_2.dl_history(launcher_id) == [new_singleton_record, singleton_record]
assert await client.dl_history(launcher_id, min_generation=uint32(1)) == [new_singleton_record]
assert await client.dl_history(launcher_id, max_generation=uint32(0)) == [singleton_record]
assert await client.dl_history(launcher_id, num_results=uint32(1)) == [new_singleton_record]
assert await client.dl_history(launcher_id, num_results=uint32(2)) == [
new_singleton_record,
singleton_record,
]
assert await client.dl_history(
launcher_id,
min_generation=uint32(1),
max_generation=uint32(1),
) == [new_singleton_record]
assert await client.dl_history(
launcher_id,
max_generation=uint32(0),
num_results=uint32(1),
) == [singleton_record]
assert await client.dl_history(
launcher_id,
min_generation=uint32(1),
num_results=uint32(1),
) == [new_singleton_record]
assert await client.dl_history(
launcher_id,
min_generation=uint32(1),
max_generation=uint32(1),
num_results=uint32(1),
) == [new_singleton_record]
assert await client.dl_singletons_by_root(launcher_id, new_root) == [new_singleton_record]
txs, launcher_id_2 = await client.create_new_dl(merkle_root, uint64(50))
txs, launcher_id_3 = await client.create_new_dl(merkle_root, uint64(50))
for i in range(0, 5):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
await asyncio.sleep(0.5)
await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id_2)
await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id_3)
next_root = bytes32([2] * 32)
await client.dl_update_multiple(
{
launcher_id: next_root,
launcher_id_2: next_root,
launcher_id_3: next_root,
}
)
for i in range(0, 5):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
await asyncio.sleep(0.5)
await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id)
await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id_2)
await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id_3)
for lid in [launcher_id, launcher_id_2, launcher_id_3]:
rec = await client.dl_latest_singleton(lid)
assert rec.root == next_root
await client_2.dl_stop_tracking(launcher_id)
assert await client_2.dl_latest_singleton(lid) is None
owned_singletons = await client.dl_owned_singletons()
owned_launcher_ids = sorted(singleton.launcher_id for singleton in owned_singletons)
assert owned_launcher_ids == sorted([launcher_id, launcher_id_2, launcher_id_3])
txs = await client.dl_new_mirror(launcher_id, uint64(1000), [b"foo", b"bar"], fee=uint64(2000000000000))
for i in range(0, 5):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
await asyncio.sleep(0.5)
additions = []
for tx in txs:
if tx.spend_bundle is not None:
additions.extend(tx.spend_bundle.additions())
mirror_coin = [c for c in additions if c.puzzle_hash == create_mirror_puzzle().get_tree_hash()][0]
mirror = Mirror(mirror_coin.name(), launcher_id, uint64(1000), [b"foo", b"bar"], True)
await time_out_assert(15, client.dl_get_mirrors, [mirror], launcher_id)
await client.dl_delete_mirror(mirror_coin.name(), fee=uint64(2000000000000))
for i in range(0, 5):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
await asyncio.sleep(0.5)
await time_out_assert(15, client.dl_get_mirrors, [], launcher_id)
finally:
# Checks that the RPC manages to stop the node
client.close()
client_2.close()
await client.await_closed()
await client_2.await_closed()
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/rpc/config.py | tests/wallet/rpc/config.py | from __future__ import annotations
checkout_blocks_and_plots = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/rpc/__init__.py | tests/wallet/rpc/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false | |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/rpc/test_wallet_rpc.py | tests/wallet/rpc/test_wallet_rpc.py | from __future__ import annotations
import dataclasses
import json
import logging
from operator import attrgetter
from typing import Any, Dict, List, Optional, Tuple
import pytest
import pytest_asyncio
from blspy import G2Element
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.consensus.coinbase import create_puzzlehash_for_pk
from flax.rpc.full_node_rpc_client import FullNodeRpcClient
from flax.rpc.wallet_rpc_client import WalletRpcClient
from flax.server.server import FlaxServer
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.simulator.time_out_assert import time_out_assert
from flax.types.announcement import Announcement
from flax.types.blockchain_format.coin import Coin
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.coin_record import CoinRecord
from flax.types.coin_spend import CoinSpend
from flax.types.peer_info import PeerInfo
from flax.types.spend_bundle import SpendBundle
from flax.util.bech32m import decode_puzzle_hash, encode_puzzle_hash
from flax.util.config import lock_and_load_config, save_config
from flax.util.hash import std_hash
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.cat_wallet.cat_constants import DEFAULT_CATS
from flax.wallet.cat_wallet.cat_wallet import CATWallet
from flax.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened
from flax.wallet.did_wallet.did_wallet import DIDWallet
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.puzzles.cat_loader import CAT_MOD
from flax.wallet.trading.trade_status import TradeStatus
from flax.wallet.transaction_record import TransactionRecord
from flax.wallet.transaction_sorting import SortKey
from flax.wallet.uncurried_puzzle import uncurry_puzzle
from flax.wallet.util.address_type import AddressType
from flax.wallet.util.compute_memos import compute_memos
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet import Wallet
from flax.wallet.wallet_node import WalletNode
from flax.wallet.wallet_protocol import WalletProtocol
from tests.util.wallet_is_synced import wallet_is_synced
log = logging.getLogger(__name__)
@dataclasses.dataclass
class WalletBundle:
node: WalletNode
rpc_client: WalletRpcClient
wallet: Wallet
@dataclasses.dataclass
class FullNodeBundle:
server: FlaxServer
api: FullNodeSimulator
rpc_client: FullNodeRpcClient
@dataclasses.dataclass
class WalletRpcTestEnvironment:
wallet_1: WalletBundle
wallet_2: WalletBundle
full_node: FullNodeBundle
async def farm_transaction_block(full_node_api: FullNodeSimulator, wallet_node: WalletNode):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(b"\00" * 32)))
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
def check_mempool_spend_count(full_node_api: FullNodeSimulator, num_of_spends):
return len(full_node_api.full_node.mempool_manager.mempool.sorted_spends) == num_of_spends
async def farm_transaction(full_node_api: FullNodeSimulator, wallet_node: WalletNode, spend_bundle: SpendBundle):
await time_out_assert(
20, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle, spend_bundle.name()
)
await farm_transaction_block(full_node_api, wallet_node)
assert full_node_api.full_node.mempool_manager.get_spendbundle(spend_bundle.name()) is None
async def generate_funds(full_node_api: FullNodeSimulator, wallet_bundle: WalletBundle, num_blocks: int = 1):
wallet_id = 1
initial_balances = await wallet_bundle.rpc_client.get_wallet_balance(str(wallet_id))
ph: bytes32 = decode_puzzle_hash(await wallet_bundle.rpc_client.get_next_address(str(wallet_id), True))
generated_funds = 0
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
peak_height = full_node_api.full_node.blockchain.get_peak_height()
assert peak_height is not None
generated_funds += calculate_pool_reward(peak_height) + calculate_base_farmer_reward(peak_height)
# Farm a dummy block to confirm the created funds
await farm_transaction_block(full_node_api, wallet_bundle.node)
expected_confirmed = initial_balances["confirmed_wallet_balance"] + generated_funds
expected_unconfirmed = initial_balances["unconfirmed_wallet_balance"] + generated_funds
await time_out_assert(20, get_confirmed_balance, expected_confirmed, wallet_bundle.rpc_client, wallet_id)
await time_out_assert(20, get_unconfirmed_balance, expected_unconfirmed, wallet_bundle.rpc_client, wallet_id)
await time_out_assert(20, wallet_bundle.rpc_client.get_synced)
return generated_funds
@pytest_asyncio.fixture(scope="function", params=[True, False])
async def wallet_rpc_environment(two_wallet_nodes_services, request, self_hostname):
full_node, wallets, bt = two_wallet_nodes_services
full_node_service = full_node[0]
full_node_api = full_node_service._api
full_node_server = full_node_api.full_node.server
wallet_service = wallets[0]
wallet_service_2 = wallets[1]
wallet_node = wallet_service._node
wallet_node_2 = wallet_service_2._node
wallet = wallet_node.wallet_state_manager.main_wallet
wallet_2 = wallet_node_2.wallet_state_manager.main_wallet
config = bt.config
hostname = config["self_hostname"]
if request.param:
wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
wallet_node_2.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
else:
wallet_node.config["trusted_peers"] = {}
wallet_node_2.config["trusted_peers"] = {}
await wallet_node.server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
await wallet_node_2.server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
client = await WalletRpcClient.create(
hostname, wallet_service.rpc_server.listen_port, wallet_service.root_path, wallet_service.config
)
client_2 = await WalletRpcClient.create(
hostname, wallet_service_2.rpc_server.listen_port, wallet_service_2.root_path, wallet_service_2.config
)
client_node = await FullNodeRpcClient.create(
hostname, full_node_service.rpc_server.listen_port, full_node_service.root_path, full_node_service.config
)
wallet_bundle_1: WalletBundle = WalletBundle(wallet_node, client, wallet)
wallet_bundle_2: WalletBundle = WalletBundle(wallet_node_2, client_2, wallet_2)
node_bundle: FullNodeBundle = FullNodeBundle(full_node_server, full_node_api, client_node)
yield WalletRpcTestEnvironment(wallet_bundle_1, wallet_bundle_2, node_bundle)
# Checks that the RPC manages to stop the node
client.close()
client_2.close()
client_node.close()
await client.await_closed()
await client_2.await_closed()
await client_node.await_closed()
async def create_tx_outputs(wallet: Wallet, output_args: List[Tuple[int, Optional[List[str]]]]) -> List[Dict[str, Any]]:
outputs = []
for args in output_args:
output = {"amount": uint64(args[0]), "puzzle_hash": await wallet.get_new_puzzlehash()}
if args[1] is not None:
assert len(args[1]) > 0
output["memos"] = args[1]
outputs.append(output)
return outputs
async def assert_wallet_types(client: WalletRpcClient, expected: Dict[WalletType, int]) -> None:
for wallet_type in WalletType:
wallets = await client.get_wallets(wallet_type)
wallet_count = len(wallets)
if wallet_type in expected:
assert wallet_count == expected.get(wallet_type, 0)
for wallet in wallets:
assert wallet["type"] == wallet_type.value
def assert_tx_amounts(
tx: TransactionRecord,
outputs: List[Dict[str, Any]],
*,
amount_fee: uint64,
change_expected: bool,
is_cat: bool = False,
) -> None:
assert tx.fee_amount == amount_fee
assert tx.amount == sum(output["amount"] for output in outputs)
expected_additions = len(outputs) if change_expected is None else len(outputs) + 1
if is_cat and amount_fee:
expected_additions += 1
assert len(tx.additions) == expected_additions
addition_amounts = [addition.amount for addition in tx.additions]
removal_amounts = [removal.amount for removal in tx.removals]
for output in outputs:
assert output["amount"] in addition_amounts
assert (sum(removal_amounts) - sum(addition_amounts)) == amount_fee
async def assert_push_tx_error(node_rpc: FullNodeRpcClient, tx: TransactionRecord):
spend_bundle = tx.spend_bundle
assert spend_bundle is not None
# check error for a ASSERT_ANNOUNCE_CONSUMED_FAILED and if the error is not there throw a value error
try:
await node_rpc.push_tx(spend_bundle)
except ValueError as error:
error_string = error.args[0]["error"] # noqa: # pylint: disable=E1126
if error_string.find("ASSERT_ANNOUNCE_CONSUMED_FAILED") == -1:
raise ValueError from error
async def tx_in_mempool(client: WalletRpcClient, transaction_id: bytes32):
tx = await client.get_transaction("1", transaction_id)
return tx.is_in_mempool()
async def get_confirmed_balance(client: WalletRpcClient, wallet_id: int):
return (await client.get_wallet_balance(str(wallet_id)))["confirmed_wallet_balance"]
async def get_unconfirmed_balance(client: WalletRpcClient, wallet_id: int):
return (await client.get_wallet_balance(str(wallet_id)))["unconfirmed_wallet_balance"]
@pytest.mark.asyncio
async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_2: Wallet = env.wallet_2.wallet
wallet_node: WalletNode = env.wallet_1.node
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
generated_funds = await generate_funds(full_node_api, env.wallet_1)
addr = encode_puzzle_hash(await wallet_2.get_new_puzzlehash(), "txfx")
tx_amount = uint64(15600000)
with pytest.raises(ValueError):
await client.send_transaction("1", uint64(100000000000000001), addr)
# Tests sending a basic transaction
tx = await client.send_transaction("1", tx_amount, addr, memos=["this is a basic tx"])
transaction_id = tx.name
spend_bundle = tx.spend_bundle
assert spend_bundle is not None
await time_out_assert(20, tx_in_mempool, True, client, transaction_id)
await time_out_assert(20, get_unconfirmed_balance, generated_funds - tx_amount, client, 1)
await farm_transaction(full_node_api, wallet_node, spend_bundle)
# Checks that the memo can be retrieved
tx_confirmed = await client.get_transaction("1", transaction_id)
assert tx_confirmed.confirmed
assert len(tx_confirmed.get_memos()) == 1
assert [b"this is a basic tx"] in tx_confirmed.get_memos().values()
assert list(tx_confirmed.get_memos().keys())[0] in [a.name() for a in spend_bundle.additions()]
await time_out_assert(20, get_confirmed_balance, generated_funds - tx_amount, client, 1)
@pytest.mark.asyncio
async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet: Wallet = env.wallet_1.wallet
wallet_node: WalletNode = env.wallet_1.node
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
await generate_funds(full_node_api, env.wallet_1)
outputs = await create_tx_outputs(wallet, [(1234321, None)])
tx = await client.create_signed_transaction(
outputs,
fee=uint64(100),
)
await client.push_transactions([tx])
spend_bundle = tx.spend_bundle
assert spend_bundle is not None
await farm_transaction(full_node_api, wallet_node, spend_bundle)
tx = await client.get_transaction("1", transaction_id=tx.name)
assert tx.confirmed
@pytest.mark.parametrize(
"output_args, fee, select_coin, is_cat",
[
([(348026, None)], 0, False, False),
([(1270495230, ["memo_1"]), (902347, ["memo_2"])], 1, True, False),
([(84920, ["memo_1_0", "memo_1_1"]), (1, ["memo_2_0"])], 0, False, False),
(
[(32058710, ["memo_1_0", "memo_1_1"]), (1, ["memo_2_0"]), (923, ["memo_3_0", "memo_3_1"])],
32804,
True,
False,
),
([(1337, ["LEET"]), (81000, ["pingwei"])], 817, False, True),
],
)
@pytest.mark.asyncio
async def test_create_signed_transaction(
wallet_rpc_environment: WalletRpcTestEnvironment,
output_args: List[Tuple[int, Optional[List[str]]]],
fee: int,
select_coin: bool,
is_cat: bool,
):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_2: Wallet = env.wallet_2.wallet
wallet_1_node: WalletNode = env.wallet_1.node
wallet_1_rpc: WalletRpcClient = env.wallet_1.rpc_client
full_node_api: FullNodeSimulator = env.full_node.api
full_node_rpc: FullNodeRpcClient = env.full_node.rpc_client
generated_funds = await generate_funds(full_node_api, env.wallet_1)
wallet_id = 1
if is_cat:
generated_funds = 10**9
res = await wallet_1_rpc.create_new_cat_and_wallet(uint64(generated_funds))
assert res["success"]
wallet_id = res["wallet_id"]
await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1)
for i in range(5):
if check_mempool_spend_count(full_node_api, 0):
break
await farm_transaction_block(full_node_api, wallet_1_node)
outputs = await create_tx_outputs(wallet_2, output_args)
amount_outputs = sum(output["amount"] for output in outputs)
amount_fee = uint64(fee)
if is_cat:
amount_total = amount_outputs
else:
amount_total = amount_outputs + amount_fee
selected_coin = None
if select_coin:
selected_coin = await wallet_1_rpc.select_coins(amount=amount_total, wallet_id=wallet_id)
assert len(selected_coin) == 1
tx = await wallet_1_rpc.create_signed_transaction(
outputs,
coins=selected_coin,
fee=amount_fee,
wallet_id=wallet_id,
)
assert_tx_amounts(tx, outputs, amount_fee=amount_fee, change_expected=not select_coin, is_cat=is_cat)
# Farm the transaction and make sure the wallet balance reflects it correct
spend_bundle = tx.spend_bundle
assert spend_bundle is not None
push_res = await full_node_rpc.push_tx(spend_bundle)
assert push_res["success"]
await farm_transaction(full_node_api, wallet_1_node, spend_bundle)
await time_out_assert(20, get_confirmed_balance, generated_funds - amount_total, wallet_1_rpc, wallet_id)
# Validate the memos
for output in outputs:
if "memos" in outputs:
found: bool = False
for addition in spend_bundle.additions():
if addition.amount == output["amount"] and addition.puzzle_hash.hex() == output["puzzle_hash"]:
cr: Optional[CoinRecord] = await full_node_rpc.get_coin_record_by_name(addition.name())
assert cr is not None
spend: Optional[CoinSpend] = await full_node_rpc.get_puzzle_and_solution(
addition.parent_coin_info, cr.confirmed_block_index
)
assert spend is not None
sb: SpendBundle = SpendBundle([spend], G2Element())
assert compute_memos(sb) == {addition.name(): [memo.encode() for memo in output["memos"]]}
found = True
assert found
@pytest.mark.asyncio
async def test_create_signed_transaction_with_coin_announcement(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_2: Wallet = env.wallet_2.wallet
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
client_node: FullNodeRpcClient = env.full_node.rpc_client
await generate_funds(full_node_api, env.wallet_1)
signed_tx_amount = uint64(888000)
tx_coin_announcements = [
Announcement(
std_hash(b"coin_id_1"),
std_hash(b"message"),
b"\xca",
),
Announcement(
std_hash(b"coin_id_2"),
bytes(Program.to("a string")),
),
]
outputs = await create_tx_outputs(wallet_2, [(signed_tx_amount, None)])
tx_res: TransactionRecord = await client.create_signed_transaction(
outputs, coin_announcements=tx_coin_announcements
)
assert_tx_amounts(tx_res, outputs, amount_fee=uint64(0), change_expected=False)
await assert_push_tx_error(client_node, tx_res)
@pytest.mark.asyncio
async def test_create_signed_transaction_with_puzzle_announcement(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_2: Wallet = env.wallet_2.wallet
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
client_node: FullNodeRpcClient = env.full_node.rpc_client
await generate_funds(full_node_api, env.wallet_1)
signed_tx_amount = uint64(888000)
tx_puzzle_announcements = [
Announcement(
std_hash(b"puzzle_hash_1"),
b"message",
b"\xca",
),
Announcement(
std_hash(b"puzzle_hash_2"),
bytes(Program.to("a string")),
),
]
outputs = await create_tx_outputs(wallet_2, [(signed_tx_amount, None)])
tx_res = await client.create_signed_transaction(outputs, puzzle_announcements=tx_puzzle_announcements)
assert_tx_amounts(tx_res, outputs, amount_fee=uint64(0), change_expected=True)
await assert_push_tx_error(client_node, tx_res)
@pytest.mark.asyncio
async def test_create_signed_transaction_with_exclude_coins(wallet_rpc_environment: WalletRpcTestEnvironment) -> None:
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_1: Wallet = env.wallet_1.wallet
wallet_1_rpc: WalletRpcClient = env.wallet_1.rpc_client
full_node_api: FullNodeSimulator = env.full_node.api
full_node_rpc: FullNodeRpcClient = env.full_node.rpc_client
await generate_funds(full_node_api, env.wallet_1)
async def it_does_not_include_the_excluded_coins() -> None:
selected_coins = await wallet_1_rpc.select_coins(amount=250000000000, wallet_id=1)
assert len(selected_coins) == 1
outputs = await create_tx_outputs(wallet_1, [(uint64(250000000000), None)])
tx = await wallet_1_rpc.create_signed_transaction(outputs, exclude_coins=selected_coins)
assert len(tx.removals) == 1
assert tx.removals[0] != selected_coins[0]
assert tx.removals[0].amount == uint64(1750000000000)
await assert_push_tx_error(full_node_rpc, tx)
async def it_throws_an_error_when_all_spendable_coins_are_excluded() -> None:
selected_coins = await wallet_1_rpc.select_coins(amount=1750000000000, wallet_id=1)
assert len(selected_coins) == 1
outputs = await create_tx_outputs(wallet_1, [(uint64(1750000000000), None)])
with pytest.raises(ValueError):
await wallet_1_rpc.create_signed_transaction(outputs, exclude_coins=selected_coins)
await it_does_not_include_the_excluded_coins()
await it_throws_an_error_when_all_spendable_coins_are_excluded()
@pytest.mark.asyncio
async def test_send_transaction_multi(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_2: Wallet = env.wallet_2.wallet
wallet_node: WalletNode = env.wallet_1.node
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
generated_funds = await generate_funds(full_node_api, env.wallet_1)
outputs = await create_tx_outputs(wallet_2, [(uint64(1), ["memo_1"]), (uint64(2), ["memo_2"])])
amount_outputs = sum(output["amount"] for output in outputs)
amount_fee = uint64(amount_outputs + 1)
send_tx_res: TransactionRecord = await client.send_transaction_multi(
"1",
outputs,
fee=amount_fee,
)
spend_bundle = send_tx_res.spend_bundle
assert spend_bundle is not None
assert send_tx_res is not None
assert_tx_amounts(send_tx_res, outputs, amount_fee=amount_fee, change_expected=True)
await farm_transaction(full_node_api, wallet_node, spend_bundle)
await time_out_assert(20, get_confirmed_balance, generated_funds - amount_outputs - amount_fee, client, 1)
# Checks that the memo can be retrieved
tx_confirmed = await client.get_transaction("1", send_tx_res.name)
assert tx_confirmed.confirmed
memos = tx_confirmed.get_memos()
assert len(memos) == len(outputs)
for output in outputs:
assert [output["memos"][0].encode()] in memos.values()
spend_bundle = send_tx_res.spend_bundle
assert spend_bundle is not None
for key in memos.keys():
assert key in [a.name() for a in spend_bundle.additions()]
@pytest.mark.asyncio
async def test_get_transactions(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet: Wallet = env.wallet_1.wallet
wallet_node: WalletNode = env.wallet_1.node
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
await generate_funds(full_node_api, env.wallet_1, 5)
all_transactions = await client.get_transactions("1")
assert len(all_transactions) >= 10
# Test transaction pagination
some_transactions = await client.get_transactions("1", 0, 5)
some_transactions_2 = await client.get_transactions("1", 5, 10)
assert some_transactions == all_transactions[0:5]
assert some_transactions_2 == all_transactions[5:10]
# Testing sorts
# Test the default sort (CONFIRMED_AT_HEIGHT)
assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height"))
all_transactions = await client.get_transactions("1", reverse=True)
assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height"), reverse=True)
# Test RELEVANCE
await client.send_transaction(
"1", uint64(1), encode_puzzle_hash(await wallet.get_new_puzzlehash(), "txfx")
) # Create a pending tx
all_transactions = await client.get_transactions("1", sort_key=SortKey.RELEVANCE)
sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"), reverse=True)
sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"), reverse=True)
sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"))
assert all_transactions == sorted_transactions
all_transactions = await client.get_transactions("1", sort_key=SortKey.RELEVANCE, reverse=True)
sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"))
sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"))
sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"), reverse=True)
assert all_transactions == sorted_transactions
# Test get_transactions to address
ph_by_addr = await wallet.get_new_puzzlehash()
await client.send_transaction("1", uint64(1), encode_puzzle_hash(ph_by_addr, "txfx"))
await client.farm_block(encode_puzzle_hash(ph_by_addr, "txfx"))
await time_out_assert(20, wallet_is_synced, True, wallet_node, full_node_api)
tx_for_address = await client.get_transactions("1", to_address=encode_puzzle_hash(ph_by_addr, "txfx"))
assert len(tx_for_address) == 1
assert tx_for_address[0].to_puzzle_hash == ph_by_addr
@pytest.mark.asyncio
async def test_get_transaction_count(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
full_node_api: FullNodeSimulator = env.full_node.api
client: WalletRpcClient = env.wallet_1.rpc_client
await generate_funds(full_node_api, env.wallet_1)
all_transactions = await client.get_transactions("1")
assert len(all_transactions) > 0
transaction_count = await client.get_transaction_count("1")
assert transaction_count == len(all_transactions)
@pytest.mark.asyncio
async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_node: WalletNode = env.wallet_1.node
client: WalletRpcClient = env.wallet_1.rpc_client
client_2: WalletRpcClient = env.wallet_2.rpc_client
full_node_api: FullNodeSimulator = env.full_node.api
await generate_funds(full_node_api, env.wallet_1, 1)
await generate_funds(full_node_api, env.wallet_2, 1)
# Creates a CAT wallet with 100 mojos and a CAT with 20 mojos
await client.create_new_cat_and_wallet(uint64(100))
await time_out_assert(20, client.get_synced)
res = await client.create_new_cat_and_wallet(uint64(20))
assert res["success"]
cat_0_id = res["wallet_id"]
asset_id = bytes32.fromhex(res["asset_id"])
assert len(asset_id) > 0
await assert_wallet_types(client, {WalletType.STANDARD_WALLET: 1, WalletType.CAT: 2})
await assert_wallet_types(client_2, {WalletType.STANDARD_WALLET: 1})
bal_0 = await client.get_wallet_balance(cat_0_id)
assert bal_0["confirmed_wallet_balance"] == 0
assert bal_0["pending_coin_removal_count"] == 1
col = await client.get_cat_asset_id(cat_0_id)
assert col == asset_id
assert (await client.get_cat_name(cat_0_id)) == CATWallet.default_wallet_name_for_unknown_cat(asset_id.hex())
await client.set_cat_name(cat_0_id, "My cat")
assert (await client.get_cat_name(cat_0_id)) == "My cat"
result = await client.cat_asset_id_to_name(col)
assert result is not None
wid, name = result
assert wid == cat_0_id
assert name == "My cat"
result = await client.cat_asset_id_to_name(bytes32([0] * 32))
assert result is None
verified_asset_id = next(iter(DEFAULT_CATS.items()))[1]["asset_id"]
result = await client.cat_asset_id_to_name(bytes32.from_hexstr(verified_asset_id))
assert result is not None
should_be_none, name = result
assert should_be_none is None
assert name == next(iter(DEFAULT_CATS.items()))[1]["name"]
# make sure spend is in mempool before farming tx block
await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1)
for i in range(5):
if check_mempool_spend_count(full_node_api, 0):
break
await farm_transaction_block(full_node_api, wallet_node)
# check that we farmed the transaction
assert check_mempool_spend_count(full_node_api, 0)
await time_out_assert(5, wallet_is_synced, True, wallet_node, full_node_api)
await time_out_assert(5, get_confirmed_balance, 20, client, cat_0_id)
bal_0 = await client.get_wallet_balance(cat_0_id)
assert bal_0["pending_coin_removal_count"] == 0
assert bal_0["unspent_coin_count"] == 1
# Creates a second wallet with the same CAT
res = await client_2.create_wallet_for_existing_cat(asset_id)
assert res["success"]
cat_1_id = res["wallet_id"]
cat_1_asset_id = bytes.fromhex(res["asset_id"])
assert cat_1_asset_id == asset_id
await assert_wallet_types(client, {WalletType.STANDARD_WALLET: 1, WalletType.CAT: 2})
await assert_wallet_types(client_2, {WalletType.STANDARD_WALLET: 1, WalletType.CAT: 1})
await farm_transaction_block(full_node_api, wallet_node)
bal_1 = await client_2.get_wallet_balance(cat_1_id)
assert bal_1["confirmed_wallet_balance"] == 0
addr_0 = await client.get_next_address(cat_0_id, False)
addr_1 = await client_2.get_next_address(cat_1_id, False)
assert addr_0 != addr_1
# Test CAT spend without a fee
tx_res = await client.cat_spend(cat_0_id, uint64(4), addr_1, uint64(0), ["the cat memo"])
assert tx_res.wallet_id == cat_0_id
spend_bundle = tx_res.spend_bundle
assert spend_bundle is not None
assert uncurry_puzzle(spend_bundle.coin_spends[0].puzzle_reveal.to_program()).mod == CAT_MOD
await farm_transaction(full_node_api, wallet_node, spend_bundle)
await farm_transaction_block(full_node_api, wallet_node)
# Test CAT spend with a fee
tx_res = await client.cat_spend(cat_0_id, uint64(1), addr_1, uint64(5_000_000), ["the cat memo"])
assert tx_res.wallet_id == cat_0_id
spend_bundle = tx_res.spend_bundle
assert spend_bundle is not None
assert uncurry_puzzle(spend_bundle.coin_spends[0].puzzle_reveal.to_program()).mod == CAT_MOD
await farm_transaction(full_node_api, wallet_node, spend_bundle)
# Test unacknowledged CAT
await wallet_node.wallet_state_manager.interested_store.add_unacknowledged_token(
asset_id, "Unknown", uint32(10000), bytes32(b"\00" * 32)
)
cats = await client.get_stray_cats()
assert len(cats) == 1
await time_out_assert(20, get_confirmed_balance, 15, client, cat_0_id)
await time_out_assert(20, get_confirmed_balance, 5, client_2, cat_1_id)
# Test CAT coin selection
selected_coins = await client.select_coins(amount=1, wallet_id=cat_0_id)
assert len(selected_coins) > 0
@pytest.mark.asyncio
async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment):
env: WalletRpcTestEnvironment = wallet_rpc_environment
wallet_node: WalletNode = env.wallet_1.node
wallet_1_rpc: WalletRpcClient = env.wallet_1.rpc_client
wallet_2_rpc: WalletRpcClient = env.wallet_2.rpc_client
full_node_api: FullNodeSimulator = env.full_node.api
await generate_funds(full_node_api, env.wallet_1, 1)
await generate_funds(full_node_api, env.wallet_2, 1)
# Creates a CAT wallet with 20 mojos
res = await wallet_1_rpc.create_new_cat_and_wallet(uint64(20))
assert res["success"]
cat_wallet_id = res["wallet_id"]
cat_asset_id = bytes32.fromhex(res["asset_id"])
await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1)
await farm_transaction_block(full_node_api, wallet_node)
await time_out_assert(5, wallet_is_synced, True, wallet_node, full_node_api)
await time_out_assert(5, get_confirmed_balance, 20, wallet_1_rpc, cat_wallet_id)
# Creates a wallet for the same CAT on wallet_2 and send 4 CAT from wallet_1 to it
await wallet_2_rpc.create_wallet_for_existing_cat(cat_asset_id)
wallet_2_address = await wallet_2_rpc.get_next_address(cat_wallet_id, False)
tx_res = await wallet_1_rpc.cat_spend(cat_wallet_id, uint64(4), wallet_2_address, uint64(0), ["the cat memo"])
spend_bundle = tx_res.spend_bundle
assert spend_bundle is not None
await farm_transaction(full_node_api, wallet_node, spend_bundle)
await time_out_assert(5, get_confirmed_balance, 4, wallet_2_rpc, cat_wallet_id)
# Create an offer of 5 flax for one CAT
offer, trade_record = await wallet_1_rpc.create_offer_for_ids(
{uint32(1): -5, cat_asset_id.hex(): 1}, validate_only=True
)
all_offers = await wallet_1_rpc.get_all_offers()
assert len(all_offers) == 0
assert offer is None
driver_dict: Dict[str, Any] = {cat_asset_id.hex(): {"type": "CAT", "tail": "0x" + cat_asset_id.hex()}}
offer, trade_record = await wallet_1_rpc.create_offer_for_ids(
{uint32(1): -5, cat_asset_id.hex(): 1},
driver_dict=driver_dict,
fee=uint64(1),
)
assert offer is not None
summary = await wallet_1_rpc.get_offer_summary(offer)
advanced_summary = await wallet_1_rpc.get_offer_summary(offer, advanced=True)
assert summary == {"offered": {"xfx": 5}, "requested": {cat_asset_id.hex(): 1}, "infos": driver_dict, "fees": 1}
assert advanced_summary == summary
assert await wallet_1_rpc.check_offer_validity(offer)
all_offers = await wallet_1_rpc.get_all_offers(file_contents=True)
assert len(all_offers) == 1
assert TradeStatus(all_offers[0].status) == TradeStatus.PENDING_ACCEPT
assert all_offers[0].offer == bytes(offer)
trade_record = await wallet_2_rpc.take_offer(offer, fee=uint64(1))
assert TradeStatus(trade_record.status) == TradeStatus.PENDING_CONFIRM
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/test_nft_offers.py | tests/wallet/nft_wallet/test_nft_offers.py | from __future__ import annotations
from secrets import token_bytes
from typing import Any, Dict, Optional
import pytest
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.full_node.mempool_manager import MempoolManager
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.cat_wallet.cat_wallet import CATWallet
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.outer_puzzles import create_asset_id, match_puzzle
from flax.wallet.puzzle_drivers import PuzzleInfo
from flax.wallet.trading.offer import Offer
from flax.wallet.trading.trade_status import TradeStatus
from flax.wallet.uncurried_puzzle import uncurry_puzzle
from tests.util.wallet_is_synced import wallets_are_synced
from tests.wallet.nft_wallet.test_nft_1_offers import mempool_not_empty
async def tx_in_pool(mempool: MempoolManager, tx_id: bytes32) -> bool:
tx = mempool.get_spendbundle(tx_id)
if tx is None:
return False
return True
async def get_trade_and_status(trade_manager, trade) -> TradeStatus: # type: ignore
trade_rec = await trade_manager.get_trade_by_id(trade.trade_id)
return TradeStatus(trade_rec.status)
@pytest.mark.parametrize(
    "trusted",
    [False],
)
@pytest.mark.asyncio
async def test_nft_offer_with_fee(two_wallet_nodes: Any, trusted: Any) -> None:
    """Trade one NFT for XFX and then back again, with fees on both sides.

    Trade 1: maker sells 1 NFT for 100 xfx (maker fee 10, taker fee 1).
    Trade 2: maker buys the NFT back for 1000 xfx (maker fee 10, taker fee 1).
    Balances and NFT ownership are verified after each confirmed trade.
    """
    # --- simulator / wallet fixture unpacking ---
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_maker = wallet_node_0.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_1.wallet_state_manager.main_wallet
    maker_ph = await wallet_maker.get_new_puzzlehash()
    taker_ph = await wallet_taker.get_new_puzzlehash()
    # Random puzzle hash used as the farming target for confirmation blocks.
    token_ph = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund both wallets with farming rewards so they can pay for offers/fees.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(maker_ph))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(taker_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_maker, name="NFT WALLET 1"
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_taker, name="NFT WALLET 2"
    )
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    # Mint a single NFT to the maker and wait for it to confirm.
    sb = await nft_wallet_maker.generate_new_nft(metadata)
    assert sb
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    # MAKE FIRST TRADE: 1 NFT for 100 xfx
    maker_balance_pre = await wallet_maker.get_confirmed_balance()
    taker_balance_pre = await wallet_taker.get_confirmed_balance()
    nft_to_offer = coins_maker[0]
    nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_asset_id: bytes32 = create_asset_id(nft_info)  # type: ignore
    driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info}
    xfx_request = 100
    maker_fee = uint64(10)
    # Positive amount = requested by the maker; -1 = the single NFT offered.
    offer_nft_for_xfx = {wallet_maker.id(): xfx_request, nft_asset_id: -1}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_nft_for_xfx, driver_dict, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = uint64(1)
    peer = wallet_node_1.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
    )
    # NOTE(review): trade_take is dereferenced here before the `is not None`
    # assertion below; a failed respond_to_offer would raise AttributeError
    # instead of a clean assert.
    sb_id = Offer.from_bytes(trade_take.offer).to_valid_spend().name()
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb_id)
    assert success
    assert error is None
    assert trade_take is not None
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_maker, trade_make)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_taker, trade_take)
    # Maker nets the requested xfx minus their fee; taker pays xfx plus fee.
    await time_out_assert(20, wallet_maker.get_confirmed_balance, maker_balance_pre + xfx_request - maker_fee)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, taker_balance_pre - xfx_request - taker_fee)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_maker) == 0
    assert len(coins_taker) == 1
    # MAKE SECOND TRADE: 1000 xfx for 1 NFT
    maker_balance_pre = await wallet_maker.get_confirmed_balance()
    taker_balance_pre = await wallet_taker.get_confirmed_balance()
    nft_to_buy = coins_taker[0]
    nft_to_buy_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_buy.full_puzzle))
    nft_to_buy_asset_id: bytes32 = create_asset_id(nft_to_buy_info)  # type: ignore
    driver_dict_to_buy: Dict[bytes32, Optional[PuzzleInfo]] = {nft_to_buy_asset_id: nft_to_buy_info}
    xfx_offered = 1000
    maker_fee = uint64(10)
    # Negative amount = offered by the maker; 1 = the NFT being requested.
    offer_xfx_for_nft = {wallet_maker.id(): -xfx_offered, nft_to_buy_asset_id: 1}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_xfx_for_nft, driver_dict_to_buy, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = uint64(1)
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
    )
    sb_id = Offer.from_bytes(trade_take.offer).to_valid_spend().name()
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb_id)
    assert success
    assert error is None
    assert trade_take is not None
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_maker, trade_make)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_taker, trade_take)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, maker_balance_pre - xfx_offered - maker_fee)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, taker_balance_pre + xfx_offered - taker_fee)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    coins_taker = await nft_wallet_taker.get_current_nfts()
    # The NFT has returned to the maker.
    assert len(coins_maker) == 1
    assert len(coins_taker) == 0
@pytest.mark.parametrize(
    "trusted",
    [False],
)
@pytest.mark.asyncio
async def test_nft_offer_cancellations(two_wallet_nodes: Any, trusted: Any) -> None:
    """Create an NFT-for-XFX offer and cancel it on-chain with a fee.

    Verifies the trade moves PENDING_CANCEL -> CANCELLED once the cancel
    spend confirms, that only the cancel fee leaves the maker's balance,
    and that the maker still owns the NFT afterwards.
    """
    # --- simulator / wallet fixture unpacking ---
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_maker = wallet_node_0.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_1.wallet_state_manager.main_wallet
    maker_ph = await wallet_maker.get_new_puzzlehash()
    taker_ph = await wallet_taker.get_new_puzzlehash()
    # Random puzzle hash used as the farming target for confirmation blocks.
    token_ph = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund both wallets with farming rewards.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(maker_ph))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(taker_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_maker, name="NFT WALLET 1"
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_taker, name="NFT WALLET 2"
    )
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    # trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    # Mint a single NFT to the maker and wait for it to confirm.
    sb = await nft_wallet_maker.generate_new_nft(metadata)
    assert sb
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    # maker creates offer and cancels
    maker_balance_pre = await wallet_maker.get_confirmed_balance()
    # taker_balance_pre = await wallet_taker.get_confirmed_balance()
    nft_to_offer = coins_maker[0]
    nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_asset_id: bytes32 = create_asset_id(nft_info)  # type: ignore
    driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info}
    xfx_request = 100
    maker_fee = uint64(10)
    offer_nft_for_xfx = {wallet_maker.id(): xfx_request, nft_asset_id: -1}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_nft_for_xfx, driver_dict, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    # await trade_manager_maker.cancel_pending_offer(trade_make.trade_id)
    # await time_out_assert(20, get_trade_and_status, TradeStatus.CANCELLED, trade_manager_maker, trade_make)
    cancel_fee = uint64(10)
    # "Safe" cancellation broadcasts an on-chain spend of the offered coins
    # (rather than just deleting the offer locally), so the cancel transactions
    # must reach the mempool and be farmed into a block before CANCELLED.
    txs = await trade_manager_maker.cancel_pending_offer_safely(trade_make.trade_id, fee=cancel_fee)
    await time_out_assert(20, get_trade_and_status, TradeStatus.PENDING_CANCEL, trade_manager_maker, trade_make)
    for tx in txs:
        if tx.spend_bundle is not None:
            await time_out_assert(20, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CANCELLED, trade_manager_maker, trade_make)
    maker_balance = await wallet_maker.get_confirmed_balance()
    # Only the cancellation fee is spent; the offered NFT never left.
    assert maker_balance == maker_balance_pre - cancel_fee
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
@pytest.mark.parametrize(
    "trusted",
    [False],
)
@pytest.mark.asyncio
async def test_nft_offer_with_metadata_update(two_wallet_nodes: Any, trusted: Any) -> None:
    """Update an NFT's metadata on-chain, then trade the updated NFT for XFX.

    The maker mints an NFT with updatable metadata fields ("mu"/"lu"), adds a
    metadata URL with a fee, verifies the update landed, and then sells the
    updated NFT for 100 xfx with fees on both sides.
    """
    # --- simulator / wallet fixture unpacking ---
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_maker = wallet_node_0.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_1.wallet_state_manager.main_wallet
    maker_ph = await wallet_maker.get_new_puzzlehash()
    taker_ph = await wallet_taker.get_new_puzzlehash()
    # Random puzzle hash used as the farming target for confirmation blocks.
    token_ph = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund both wallets with farming rewards.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(maker_ph))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(taker_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_maker, name="NFT WALLET 1"
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_taker, name="NFT WALLET 2"
    )
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    # Metadata includes the updatable fields: "mu" (metadata URIs),
    # "lu" (license URIs), "sn"/"st" (series number / total).
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
            ("mu", []),
            ("lu", []),
            ("sn", uint64(1)),
            ("st", uint64(1)),
        ]
    )
    # Mint a single NFT to the maker and wait for it to confirm.
    sb = await nft_wallet_maker.generate_new_nft(metadata)
    assert sb
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    # Maker updates metadata:
    nft_to_update = coins_maker[0]
    url_to_add = "https://new_url.com"
    key = "mu"
    fee_for_update = uint64(10)
    update_sb = await nft_wallet_maker.update_metadata(nft_to_update, key, url_to_add, fee=fee_for_update)
    mempool_mgr = full_node_api.full_node.mempool_manager
    await time_out_assert_not_none(20, mempool_mgr.get_spendbundle, update_sb.name())  # type: ignore
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    # Re-read the NFT and confirm the new URL landed in its metadata.
    coins_maker = await nft_wallet_maker.get_current_nfts()
    updated_nft = coins_maker[0]
    updated_nft_info = match_puzzle(uncurry_puzzle(updated_nft.full_puzzle))
    assert url_to_add in updated_nft_info.also().info["metadata"]  # type: ignore
    # MAKE FIRST TRADE: 1 NFT for 100 xfx
    maker_balance_pre = await wallet_maker.get_confirmed_balance()
    taker_balance_pre = await wallet_taker.get_confirmed_balance()
    nft_to_offer = coins_maker[0]
    nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_asset_id: bytes32 = create_asset_id(nft_info)  # type: ignore
    driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info}
    xfx_request = 100
    maker_fee = uint64(10)
    # Positive amount = requested by the maker; -1 = the single NFT offered.
    offer_nft_for_xfx = {wallet_maker.id(): xfx_request, nft_asset_id: -1}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_nft_for_xfx, driver_dict, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = uint64(1)
    peer = wallet_node_1.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert success
    assert error is None
    assert trade_take is not None
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_maker, trade_make)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_taker, trade_take)
    # Maker nets the requested xfx minus their fee; taker pays xfx plus fee.
    await time_out_assert(20, wallet_maker.get_confirmed_balance, maker_balance_pre + xfx_request - maker_fee)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, taker_balance_pre - xfx_request - taker_fee)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_maker) == 0
    assert len(coins_taker) == 1
@pytest.mark.parametrize(
    "trusted",
    [False],
)
@pytest.mark.asyncio
async def test_nft_offer_nft_for_cat(two_wallet_nodes: Any, trusted: Any) -> None:
    """Trade NFTs against CAT tokens, including a multi-CAT offer.

    Trade 1: maker sells 1 NFT for 2500 of the taker's CAT.
    Trade 2: maker offers two different CATs (400 + 500) to buy the NFT back.
    CAT balances, XFX fee deductions, and NFT ownership are checked after
    each confirmed trade.
    """
    # --- simulator / wallet fixture unpacking ---
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_maker = wallet_node_0.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_1.wallet_state_manager.main_wallet
    maker_ph = await wallet_maker.get_new_puzzlehash()
    taker_ph = await wallet_taker.get_new_puzzlehash()
    # Random puzzle hash used as the farming target for confirmation blocks.
    token_ph = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund both wallets with farming rewards.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(maker_ph))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(taker_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    # Create NFT wallets and nfts for maker and taker
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_maker, name="NFT WALLET 1"
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_taker, name="NFT WALLET 2"
    )
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    # Mint a single NFT to the maker and wait for it to confirm.
    sb = await nft_wallet_maker.generate_new_nft(metadata)
    assert sb
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    # Create two new CATs and wallets for maker and taker
    cats_to_mint = 10000
    async with wallet_node_0.wallet_state_manager.lock:
        cat_wallet_maker: CATWallet = await CATWallet.create_new_cat_wallet(
            wallet_node_0.wallet_state_manager, wallet_maker, {"identifier": "genesis_by_id"}, uint64(cats_to_mint)
        )
        await time_out_assert(20, mempool_not_empty, True, full_node_api)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    async with wallet_node_1.wallet_state_manager.lock:
        cat_wallet_taker: CATWallet = await CATWallet.create_new_cat_wallet(
            wallet_node_1.wallet_state_manager, wallet_taker, {"identifier": "genesis_by_id"}, uint64(cats_to_mint)
        )
        await time_out_assert(20, mempool_not_empty, True, full_node_api)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, cat_wallet_maker.get_confirmed_balance, cats_to_mint)
    await time_out_assert(20, cat_wallet_maker.get_unconfirmed_balance, cats_to_mint)
    await time_out_assert(20, cat_wallet_taker.get_confirmed_balance, cats_to_mint)
    await time_out_assert(20, cat_wallet_taker.get_unconfirmed_balance, cats_to_mint)
    # Each side also needs a wallet tracking the *other* side's CAT so it can
    # receive that asset in a trade.
    wallet_maker_for_taker_cat: CATWallet = await CATWallet.create_wallet_for_cat(
        wallet_node_0.wallet_state_manager, wallet_maker, cat_wallet_taker.get_asset_id()
    )
    wallet_taker_for_maker_cat: CATWallet = await CATWallet.create_wallet_for_cat(
        wallet_node_1.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id()
    )
    assert wallet_taker_for_maker_cat
    # MAKE FIRST TRADE: 1 NFT for 2500 taker cats
    maker_balance_pre = await wallet_maker.get_confirmed_balance()
    taker_balance_pre = await wallet_taker.get_confirmed_balance()
    taker_cat_maker_balance_pre = await wallet_maker_for_taker_cat.get_confirmed_balance()
    taker_cat_taker_balance_pre = await cat_wallet_taker.get_confirmed_balance()
    nft_to_offer = coins_maker[0]
    nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_asset_id: bytes32 = create_asset_id(nft_info)  # type: ignore
    driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info}
    maker_fee = uint64(10)
    taker_cat_offered = 2500
    # -1 = the NFT offered; positive amount = taker CAT requested by the maker.
    offer_nft_for_cat = {nft_asset_id: -1, wallet_maker_for_taker_cat.id(): taker_cat_offered}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_nft_for_cat, driver_dict, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = uint64(1)
    peer = wallet_node_1.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert success
    assert error is None
    assert trade_take is not None
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_maker, trade_make)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_taker, trade_take)
    # CATs moved taker -> maker; XFX only changed by the fees.
    taker_cat_maker_balance_post = await wallet_maker_for_taker_cat.get_confirmed_balance()
    taker_cat_taker_balance_post = await cat_wallet_taker.get_confirmed_balance()
    assert taker_cat_maker_balance_post == taker_cat_maker_balance_pre + taker_cat_offered
    assert taker_cat_taker_balance_post == taker_cat_taker_balance_pre - taker_cat_offered
    maker_balance_post = await wallet_maker.get_confirmed_balance()
    taker_balance_post = await wallet_taker.get_confirmed_balance()
    assert maker_balance_post == maker_balance_pre - maker_fee
    assert taker_balance_post == taker_balance_pre - taker_fee
    coins_maker = await nft_wallet_maker.get_current_nfts()
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_maker) == 0
    assert len(coins_taker) == 1
    # Make an offer for taker NFT for multiple cats
    maker_cat_amount = 400
    taker_cat_amount = 500
    nft_to_buy = coins_taker[0]
    nft_to_buy_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_buy.full_puzzle))
    nft_to_buy_asset_id: bytes32 = create_asset_id(nft_to_buy_info)  # type: ignore
    driver_dict_to_buy: Dict[bytes32, Optional[PuzzleInfo]] = {
        nft_to_buy_asset_id: nft_to_buy_info,
    }
    maker_fee = uint64(10)
    # Maker requests the NFT (1) and offers two different CATs (negative).
    offer_multi_cats_for_nft = {
        nft_to_buy_asset_id: 1,
        wallet_maker_for_taker_cat.id(): -taker_cat_amount,
        cat_wallet_maker.id(): -maker_cat_amount,
    }
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_multi_cats_for_nft, driver_dict_to_buy, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = uint64(1)
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=taker_fee
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert success
    assert error is None
    assert trade_take is not None
    # check balances: taker wallet down an NFT, up cats
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_maker, trade_make)
    await time_out_assert(20, get_trade_and_status, TradeStatus.CONFIRMED, trade_manager_taker, trade_take)
    taker_cat_maker_balance_post_2 = await wallet_maker_for_taker_cat.get_confirmed_balance()
    taker_cat_taker_balance_post_2 = await cat_wallet_taker.get_confirmed_balance()
    assert taker_cat_maker_balance_post_2 == taker_cat_maker_balance_post - taker_cat_amount
    assert taker_cat_taker_balance_post_2 == taker_cat_taker_balance_post + taker_cat_amount
    maker_balance_post_2 = await wallet_maker.get_confirmed_balance()
    taker_balance_post_2 = await wallet_taker.get_confirmed_balance()
    assert maker_balance_post_2 == maker_balance_post - maker_fee
    assert taker_balance_post_2 == taker_balance_post - taker_fee
    coins_maker = await nft_wallet_maker.get_current_nfts()
    coins_taker = await nft_wallet_taker.get_current_nfts()
    # The NFT has returned to the maker.
    assert len(coins_maker) == 1
    assert len(coins_taker) == 0
@pytest.mark.parametrize(
"trusted",
[False],
)
@pytest.mark.asyncio
async def test_nft_offer_nft_for_nft(two_wallet_nodes: Any, trusted: Any) -> None:
full_nodes, wallets, _ = two_wallet_nodes
full_node_api: FullNodeSimulator = full_nodes[0]
full_node_server = full_node_api.server
wallet_node_0, server_0 = wallets[0]
wallet_node_1, server_1 = wallets[1]
wallet_maker = wallet_node_0.wallet_state_manager.main_wallet
wallet_taker = wallet_node_1.wallet_state_manager.main_wallet
maker_ph = await wallet_maker.get_new_puzzlehash()
taker_ph = await wallet_taker.get_new_puzzlehash()
token_ph = bytes32(token_bytes())
if trusted:
wallet_node_0.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
wallet_node_1.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
else:
wallet_node_0.config["trusted_peers"] = {}
wallet_node_1.config["trusted_peers"] = {}
await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(maker_ph))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(taker_ph))
await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(token_ph))
await time_out_assert(20, wallets_are_synced, True, [wallet_node_0, wallet_node_1], full_node_api)
# Create NFT wallets and nfts for maker and taker
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/test_nft_bulk_mint.py | tests/wallet/nft_wallet/test_nft_bulk_mint.py | from __future__ import annotations
import asyncio
from secrets import token_bytes
from typing import Any, Dict
import pytest
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.rpc.full_node_rpc_api import FullNodeRpcApi
from flax.rpc.full_node_rpc_client import FullNodeRpcClient
from flax.rpc.rpc_server import start_rpc_server
from flax.rpc.wallet_rpc_api import WalletRpcApi
from flax.rpc.wallet_rpc_client import WalletRpcClient
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.types.spend_bundle import SpendBundle
from flax.util.bech32m import encode_puzzle_hash
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.did_wallet.did_wallet import DIDWallet
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.util.address_type import AddressType
async def nft_count(wallet: NFTWallet) -> int:
    """Return how many NFTs *wallet* currently tracks in its NFT store."""
    return len(await wallet.nft_store.get_nft_list())
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_mint_from_did(two_wallet_nodes: Any, trusted: Any) -> None:
    """Mint one NFT from a DID-backed NFT wallet directly to the taker.

    Creates a DID wallet for the maker, mints an NFT via mint_from_did with
    the taker's puzzle hash as target, and verifies the NFT (with its
    minter_did set) lands in the taker's wallet while the maker pays the
    fee, the mint amount, and the 1-mojo DID coin.
    """
    # --- simulator / wallet fixture unpacking ---
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
    api_0 = WalletRpcApi(wallet_node_0)
    ph_maker = await wallet_0.get_new_puzzlehash()
    ph_taker = await wallet_1.get_new_puzzlehash()
    # Random puzzle hash used as the farming target for confirmation blocks.
    ph_token = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # for _ in range(1, num_blocks):
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    funds = calculate_pool_reward(uint32(1)) + calculate_base_farmer_reward(uint32(1))
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    # Create a DID (costs 1 mojo) and wait for its creation spend to confirm.
    did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, uint64(1)
    )
    spend_bundle_list = await wallet_node_0.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(did_wallet.id())
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    hex_did_id = did_wallet.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    await time_out_assert(5, did_wallet.get_confirmed_balance, 1)
    # Maker's NFT wallet is bound to the DID; taker's is a plain NFT wallet.
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, name="NFT WALLET 1", did_id=did_id
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_1, name="NFT WALLET 2"
    )
    # construct sample metadata
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    royalty_pc = uint16(300)
    royalty_addr = ph_maker
    mint_total = 1
    fee = uint64(100)
    metadata_list = [
        {"program": metadata, "royalty_pc": royalty_pc, "royalty_ph": royalty_addr} for x in range(mint_total)
    ]
    # All minted NFTs are sent straight to the taker's puzzle hash.
    target_list = [ph_taker for x in range(mint_total)]
    sb = await nft_wallet_maker.mint_from_did(
        metadata_list, target_list=target_list, mint_number_start=1, mint_total=mint_total, fee=fee
    )
    # mint_from_did only builds the bundle; push it through the RPC API.
    await api_0.push_tx({"spend_bundle": bytes(sb).hex()})
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(30, nft_count, mint_total, nft_wallet_taker)
    await time_out_assert(30, nft_count, 0, nft_wallet_maker)
    # Maker pays: fee + 1 mojo per minted NFT + 1 mojo locked in the DID.
    expected_xfx_bal = funds - fee - mint_total - 1
    await time_out_assert(30, wallet_0.get_confirmed_balance, expected_xfx_bal)
    assert (await nft_wallet_taker.get_current_nfts())[0].minter_did == did_id
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_mint_from_did_rpc(two_wallet_nodes: Any, trusted: Any, self_hostname: str) -> None:
    """Bulk-mint NFTs from a DID via the wallet RPC and verify the taker receives them.

    Mints ``n`` NFTs in chunks of ``chunk`` through ``nft_mint_bulk``, manually
    threading the DID coin and the XFX funding coin from one chunk's spend bundle
    into the next, pushes each bundle via the full-node RPC, and finally checks
    that every NFT's edition metadata on the taker side matches what was minted.
    Runs once with the full node as a trusted peer and once untrusted.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    bt = full_node_api.bt
    full_node_server = full_node_api.server
    wallet_node_maker, server_0 = wallets[0]
    wallet_node_taker, server_1 = wallets[1]
    wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Throwaway puzzle hash used as the farming target once both wallets are funded.
    ph_token = bytes32(token_bytes())
    if trusted:
        wallet_node_maker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_taker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_maker.config["trusted_peers"] = {}
        wallet_node_taker.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund both wallets with one block reward each, then wait for the balances to settle.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    funds = calculate_pool_reward(uint32(1)) + calculate_base_farmer_reward(uint32(1))
    await time_out_assert(30, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_maker.get_confirmed_balance, funds)
    await time_out_assert(30, wallet_taker.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_taker.get_confirmed_balance, funds)
    # Stand up real RPC servers/clients so the test exercises the RPC layer end to end.
    api_maker = WalletRpcApi(wallet_node_maker)
    api_taker = WalletRpcApi(wallet_node_taker)
    config = bt.config
    daemon_port = config["daemon_port"]

    def stop_node_cb() -> None:
        # No-op stop callback required by start_rpc_server.
        pass

    full_node_rpc_api = FullNodeRpcApi(full_node_api.full_node)
    rpc_server_node = await start_rpc_server(
        full_node_rpc_api,
        self_hostname,
        daemon_port,
        uint16(0),
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_server = await start_rpc_server(
        api_maker,
        self_hostname,
        daemon_port,
        uint16(0),
        lambda x: None,  # type: ignore
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    client = await WalletRpcClient.create(self_hostname, rpc_server.listen_port, bt.root_path, config)
    client_node = await FullNodeRpcClient.create(self_hostname, rpc_server_node.listen_port, bt.root_path, config)
    # Create the maker's DID (cost: 1 mojo) and confirm it on chain.
    did_wallet_maker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, uint64(1)
    )
    spend_bundle_list = await wallet_node_maker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_maker.id()
    )
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(30, wallet_maker.get_pending_change_balance, 0)
    await time_out_assert(30, wallet_maker.get_unconfirmed_balance, funds - 1)
    await time_out_assert(30, wallet_maker.get_confirmed_balance, funds - 1)
    hex_did_id = did_wallet_maker.get_my_DID()
    # The RPC expects a bech32m-encoded DID, not the raw hex launcher id.
    hmr_did_id = encode_puzzle_hash(bytes32.from_hexstr(hex_did_id), AddressType.DID.hrp(config))
    nft_wallet_maker = await api_maker.create_new_wallet(
        dict(wallet_type="nft_wallet", name="NFT WALLET 1", did_id=hmr_did_id)
    )
    assert isinstance(nft_wallet_maker, dict)
    assert nft_wallet_maker.get("success")
    nft_wallet_taker = await api_taker.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 2"))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    # Everything below uses live RPC servers, so tear them down in the finally block.
    try:
        n = 10
        metadata_list = [
            {
                "hash": bytes32(token_bytes(32)).hex(),
                "uris": ["https://data.com/{}".format(i)],
                "meta_hash": bytes32(token_bytes(32)).hex(),
                "meta_uris": ["https://meatadata.com/{}".format(i)],
                "license_hash": bytes32(token_bytes(32)).hex(),
                "license_uris": ["https://license.com/{}".format(i)],
                "edition_number": i + 1,
                "edition_total": n,
            }
            for i in range(n)
        ]
        target_list = [encode_puzzle_hash((ph_taker), "xfx") for x in range(n)]
        royalty_address = encode_puzzle_hash(bytes32(token_bytes(32)), "xfx")
        royalty_percentage = 300
        fee = 100
        # One mojo per NFT plus the per-chunk fee charged for every mint.
        required_amount = n + (fee * n)
        xfx_coins = await client.select_coins(amount=required_amount, wallet_id=wallet_maker.id())
        funding_coin = xfx_coins[0]
        assert funding_coin.amount >= required_amount
        funding_coin_dict = xfx_coins[0].to_json_dict()
        chunk = 5
        next_coin = funding_coin
        # wallet_id=2 is the DID wallet created above; its single coin authorizes the mint.
        did_coin = (await client.select_coins(amount=1, wallet_id=2))[0]
        did_lineage_parent = None
        spends = []
        for i in range(0, n, chunk):
            resp: Dict[str, Any] = await client.nft_mint_bulk(
                wallet_id=nft_wallet_maker["wallet_id"],
                metadata_list=metadata_list[i : i + chunk],
                target_list=target_list[i : i + chunk],
                royalty_percentage=royalty_percentage,
                royalty_address=royalty_address,
                mint_number_start=i + 1,
                mint_total=n,
                xfx_coins=[next_coin.to_json_dict()],
                xfx_change_target=funding_coin_dict["puzzle_hash"],
                did_coin=did_coin.to_json_dict(),
                did_lineage_parent=did_lineage_parent,
                mint_from_did=True,
                fee=fee,
            )
            assert resp["success"]
            sb: SpendBundle = SpendBundle.from_json_dict(resp["spend_bundle"])
            # Thread the DID coin forward: record its lineage parent and locate the
            # re-created 1-mojo DID coin in this bundle for use by the next chunk.
            did_lineage_parent = [cn for cn in sb.removals() if cn.name() == did_coin.name()][0].parent_coin_info.hex()
            did_coin = [cn for cn in sb.additions() if (cn.parent_coin_info == did_coin.name()) and (cn.amount == 1)][0]
            spends.append(sb)
            # The XFX change lands back on the funding coin's puzzle hash; chain it too.
            xfx_adds = [c for c in sb.additions() if c.puzzle_hash == funding_coin.puzzle_hash]
            assert len(xfx_adds) == 1
            next_coin = xfx_adds[0]
        # Push each chunk and farm a block before the next one: later bundles spend
        # coins created by earlier bundles, so they cannot all sit in the mempool at once.
        for sb in spends:
            resp = await client_node.push_tx(sb)
            assert resp["success"]
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
            await asyncio.sleep(2)
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))

        async def get_taker_nfts() -> int:
            # Count NFTs visible in the taker's NFT wallet via the RPC API.
            nfts = (await api_taker.nft_get_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["nft_list"]
            return len(nfts)

        # We are using a long time out here because it can take a long time for the NFTs to show up
        # Even with only 10 NFTs it regularly takes longer than 30-40s for them to be found
        await time_out_assert(60, get_taker_nfts, n)
        # check NFT edition numbers
        nfts = (await api_taker.nft_get_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["nft_list"]
        for nft in nfts:
            edition_num = nft.edition_number
            # metadata_list is ordered by edition number, so index it by edition - 1.
            meta_dict = metadata_list[edition_num - 1]
            assert meta_dict["hash"] == nft.data_hash.hex()
            assert meta_dict["uris"] == nft.data_uris
            assert meta_dict["meta_hash"] == nft.metadata_hash.hex()
            assert meta_dict["meta_uris"] == nft.metadata_uris
            assert meta_dict["license_hash"] == nft.license_hash.hex()
            assert meta_dict["license_uris"] == nft.license_uris
            assert meta_dict["edition_number"] == nft.edition_number
            assert meta_dict["edition_total"] == nft.edition_total
    finally:
        # Close clients before servers, then await full shutdown of each.
        client.close()
        client_node.close()
        rpc_server.close()
        rpc_server_node.close()
        await client.await_closed()
        await client_node.await_closed()
        await rpc_server.await_closed()
        await rpc_server_node.await_closed()
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_mint_from_did_rpc_no_royalties(two_wallet_nodes: Any, trusted: Any, self_hostname: str) -> None:
    """Bulk-mint NFTs from a DID via the wallet RPC with no royalties and no fee.

    Same chunked ``nft_mint_bulk`` flow as ``test_nft_mint_from_did_rpc`` but with
    ``royalty_address``/``royalty_percentage`` set to ``None`` and no ``fee``
    argument, verifying only that all ``n`` NFTs arrive in the taker's wallet.
    Runs once with the full node as a trusted peer and once untrusted.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    bt = full_node_api.bt
    full_node_server = full_node_api.server
    wallet_node_maker, server_0 = wallets[0]
    wallet_node_taker, server_1 = wallets[1]
    wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Throwaway puzzle hash used as the farming target once both wallets are funded.
    ph_token = bytes32(token_bytes())
    if trusted:
        wallet_node_maker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_taker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_maker.config["trusted_peers"] = {}
        wallet_node_taker.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund both wallets with one block reward each, then wait for the balances to settle.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    funds = calculate_pool_reward(uint32(1)) + calculate_base_farmer_reward(uint32(1))
    await time_out_assert(30, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_maker.get_confirmed_balance, funds)
    await time_out_assert(30, wallet_taker.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_taker.get_confirmed_balance, funds)
    # Stand up real RPC servers/clients so the test exercises the RPC layer end to end.
    api_maker = WalletRpcApi(wallet_node_maker)
    api_taker = WalletRpcApi(wallet_node_taker)
    config = bt.config
    daemon_port = config["daemon_port"]

    def stop_node_cb() -> None:
        # No-op stop callback required by start_rpc_server.
        pass

    full_node_rpc_api = FullNodeRpcApi(full_node_api.full_node)
    rpc_server_node = await start_rpc_server(
        full_node_rpc_api,
        self_hostname,
        daemon_port,
        uint16(0),
        stop_node_cb,
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    rpc_server = await start_rpc_server(
        api_maker,
        self_hostname,
        daemon_port,
        uint16(0),
        lambda x: None,  # type: ignore
        bt.root_path,
        config,
        connect_to_daemon=False,
    )
    client = await WalletRpcClient.create(self_hostname, rpc_server.listen_port, bt.root_path, config)
    client_node = await FullNodeRpcClient.create(self_hostname, rpc_server_node.listen_port, bt.root_path, config)
    # Create the maker's DID (cost: 1 mojo) and confirm it on chain.
    did_wallet_maker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, uint64(1)
    )
    spend_bundle_list = await wallet_node_maker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_maker.id()
    )
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(30, wallet_maker.get_pending_change_balance, 0)
    await time_out_assert(30, wallet_maker.get_unconfirmed_balance, funds - 1)
    await time_out_assert(30, wallet_maker.get_confirmed_balance, funds - 1)
    hex_did_id = did_wallet_maker.get_my_DID()
    # The RPC expects a bech32m-encoded DID, not the raw hex launcher id.
    hmr_did_id = encode_puzzle_hash(bytes32.from_hexstr(hex_did_id), AddressType.DID.hrp(config))
    nft_wallet_maker = await api_maker.create_new_wallet(
        dict(wallet_type="nft_wallet", name="NFT WALLET 1", did_id=hmr_did_id)
    )
    assert isinstance(nft_wallet_maker, dict)
    assert nft_wallet_maker.get("success")
    nft_wallet_taker = await api_taker.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 2"))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    # Everything below uses live RPC servers, so tear them down in the finally block.
    try:
        n = 10
        metadata_list = [
            {
                "hash": bytes32(token_bytes(32)).hex(),
                "uris": ["https://data.com/{}".format(i)],
                "meta_hash": bytes32(token_bytes(32)).hex(),
                "meta_uris": ["https://meatadata.com/{}".format(i)],
                "license_hash": bytes32(token_bytes(32)).hex(),
                "license_uris": ["https://license.com/{}".format(i)],
                "edition_number": i + 1,
                "edition_total": n,
            }
            for i in range(n)
        ]
        target_list = [encode_puzzle_hash((ph_taker), "xfx") for x in range(n)]
        # Explicitly exercise the no-royalty code path.
        royalty_address = None
        royalty_percentage = None
        # No fee in this variant: only one mojo per NFT is needed.
        required_amount = n
        xfx_coins = await client.select_coins(amount=required_amount, wallet_id=wallet_maker.id())
        funding_coin = xfx_coins[0]
        assert funding_coin.amount >= required_amount
        funding_coin_dict = xfx_coins[0].to_json_dict()
        chunk = 5
        next_coin = funding_coin
        # wallet_id=2 is the DID wallet created above; its single coin authorizes the mint.
        did_coin = (await client.select_coins(amount=1, wallet_id=2))[0]
        did_lineage_parent = None
        spends = []
        for i in range(0, n, chunk):
            resp: Dict[str, Any] = await client.nft_mint_bulk(
                wallet_id=nft_wallet_maker["wallet_id"],
                metadata_list=metadata_list[i : i + chunk],
                target_list=target_list[i : i + chunk],
                royalty_percentage=royalty_percentage,
                royalty_address=royalty_address,
                mint_number_start=i + 1,
                mint_total=n,
                xfx_coins=[next_coin.to_json_dict()],
                xfx_change_target=funding_coin_dict["puzzle_hash"],
                did_coin=did_coin.to_json_dict(),
                did_lineage_parent=did_lineage_parent,
                mint_from_did=True,
            )
            assert resp["success"]
            sb: SpendBundle = SpendBundle.from_json_dict(resp["spend_bundle"])
            # Thread the DID coin forward: record its lineage parent and locate the
            # re-created 1-mojo DID coin in this bundle for use by the next chunk.
            did_lineage_parent = [cn for cn in sb.removals() if cn.name() == did_coin.name()][0].parent_coin_info.hex()
            did_coin = [cn for cn in sb.additions() if (cn.parent_coin_info == did_coin.name()) and (cn.amount == 1)][0]
            spends.append(sb)
            # The XFX change lands back on the funding coin's puzzle hash; chain it too.
            xfx_adds = [c for c in sb.additions() if c.puzzle_hash == funding_coin.puzzle_hash]
            assert len(xfx_adds) == 1
            next_coin = xfx_adds[0]
        # Push each chunk and farm a block before the next one: later bundles spend
        # coins created by earlier bundles, so they cannot all sit in the mempool at once.
        for sb in spends:
            resp = await client_node.push_tx(sb)
            assert resp["success"]
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
            await asyncio.sleep(2)
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))

        async def get_taker_nfts() -> int:
            # Count NFTs visible in the taker's NFT wallet via the RPC API.
            nfts = (await api_taker.nft_get_nfts({"wallet_id": nft_wallet_taker["wallet_id"]}))["nft_list"]
            return len(nfts)

        # Long timeout: NFTs can take well over 30s to appear in the taker's wallet.
        await time_out_assert(60, get_taker_nfts, n)
    finally:
        # Close clients before servers, then await full shutdown of each.
        client.close()
        client_node.close()
        rpc_server.close()
        rpc_server_node.close()
        await client.await_closed()
        await client_node.await_closed()
        await rpc_server.await_closed()
        await rpc_server_node.await_closed()
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_mint_from_did_multiple_xfx(two_wallet_nodes: Any, trusted: Any) -> None:
    """Mint an NFT from a DID using a spend funded by more than one XFX coin.

    Selects two disjoint coin sets, unions them, and passes them to
    ``NFTWallet.mint_from_did`` to verify bulk minting works with multiple
    funding coins; finally checks the maker's balance reflects fee + mint cost.
    Runs once with the full node as a trusted peer and once untrusted.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_maker = wallet_node_0.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_1.wallet_state_manager.main_wallet
    api_0 = WalletRpcApi(wallet_node_0)
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Throwaway puzzle hash used as the farming target once the maker is funded.
    ph_token = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Fund the maker wallet with one block reward and wait for the balance to settle.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    funds = calculate_pool_reward(uint32(1)) + calculate_base_farmer_reward(uint32(1))
    await time_out_assert(30, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_maker.get_confirmed_balance, funds)
    # Create the maker's DID (cost: 1 mojo) and confirm it on chain.
    did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_0.wallet_state_manager, wallet_maker, uint64(1)
    )
    spend_bundle_list = await wallet_node_0.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(did_wallet.id())
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(30, wallet_maker.get_pending_change_balance, 0)
    hex_did_id = did_wallet.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    await time_out_assert(5, did_wallet.get_confirmed_balance, 1)
    # Maker's NFT wallet is bound to the DID; taker's is a plain NFT wallet.
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_maker, name="NFT WALLET 1", did_id=did_id
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_taker, name="NFT WALLET 2"
    )
    # construct sample metadata
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    royalty_pc = uint16(300)
    royalty_addr = ph_maker
    mint_total = 1
    fee = uint64(100)
    metadata_list = [
        {"program": metadata, "royalty_pc": royalty_pc, "royalty_ph": royalty_addr} for x in range(mint_total)
    ]
    # Grab two coins for testing that we can create a bulk minting with more than 1 xfx coin
    xfx_coins_1 = await wallet_maker.select_coins(amount=10000)
    xfx_coins_2 = await wallet_maker.select_coins(amount=10000, exclude=xfx_coins_1)
    xfx_coins = xfx_coins_1.union(xfx_coins_2)
    target_list = [ph_taker for x in range(mint_total)]
    sb = await nft_wallet_maker.mint_from_did(
        metadata_list,
        target_list=target_list,
        mint_number_start=1,
        mint_total=mint_total,
        xfx_coins=xfx_coins,
        fee=fee,
    )
    await api_0.push_tx({"spend_bundle": bytes(sb).hex()})
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    # NFT should move to the taker's wallet and leave the maker's empty.
    await time_out_assert(30, nft_count, mint_total, nft_wallet_taker)
    await time_out_assert(30, nft_count, 0, nft_wallet_maker)
    # confirm that the spend uses the right amount of xfx
    # (initial funds - fee - one mojo per NFT - one mojo locked in the DID)
    expected_xfx_bal = funds - fee - mint_total - 1
    await time_out_assert(30, wallet_maker.get_confirmed_balance, expected_xfx_bal)
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_mint_from_xfx(two_wallet_nodes: Any, trusted: Any) -> None:
    """Mint an NFT paid for directly from XFX (``mint_from_xfx``) rather than from a DID spend.

    The maker's NFT wallet is still DID-backed, but the mint itself is driven by
    ``NFTWallet.mint_from_xfx``; the test checks NFT delivery to the taker and the
    maker's resulting balance. Runs once trusted and once untrusted.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
    api_0 = WalletRpcApi(wallet_node_0)
    ph_maker = await wallet_0.get_new_puzzlehash()
    ph_taker = await wallet_1.get_new_puzzlehash()
    # Throwaway puzzle hash used as the farming target once the maker is funded.
    ph_token = bytes32(token_bytes())
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # for _ in range(1, num_blocks):
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    funds = calculate_pool_reward(uint32(1)) + calculate_base_farmer_reward(uint32(1))
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    # Create the maker's DID (cost: 1 mojo) and confirm it on chain.
    did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, uint64(1)
    )
    spend_bundle_list = await wallet_node_0.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(did_wallet.id())
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    hex_did_id = did_wallet.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    await time_out_assert(5, did_wallet.get_confirmed_balance, 1)
    # Maker's NFT wallet is bound to the DID; taker's is a plain NFT wallet.
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, name="NFT WALLET 1", did_id=did_id
    )
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_1, name="NFT WALLET 2"
    )
    # construct sample metadata
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    royalty_pc = uint16(300)
    royalty_addr = ph_maker
    mint_total = 1
    fee = uint64(100)
    metadata_list = [
        {"program": metadata, "royalty_pc": royalty_pc, "royalty_ph": royalty_addr} for x in range(mint_total)
    ]
    target_list = [ph_taker for x in range(mint_total)]
    # Mint directly from XFX (no DID spend drives the mint).
    sb = await nft_wallet_maker.mint_from_xfx(
        metadata_list, target_list=target_list, mint_number_start=1, mint_total=mint_total, fee=fee
    )
    await api_0.push_tx({"spend_bundle": bytes(sb).hex()})
    await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    # NFT should move to the taker's wallet and leave the maker's empty.
    await time_out_assert(30, nft_count, mint_total, nft_wallet_taker)
    await time_out_assert(30, nft_count, 0, nft_wallet_maker)
    # Initial funds - fee - one mojo per NFT - one mojo locked in the DID.
    expected_xfx_bal = funds - fee - mint_total - 1
    await time_out_assert(30, wallet_0.get_confirmed_balance, expected_xfx_bal)
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_nft_mint_from_xfx_rpc(two_wallet_nodes: Any, trusted: Any, self_hostname: str) -> None:
full_nodes, wallets, _ = two_wallet_nodes
full_node_api: FullNodeSimulator = full_nodes[0]
bt = full_node_api.bt
full_node_server = full_node_api.server
wallet_node_maker, server_0 = wallets[0]
wallet_node_taker, server_1 = wallets[1]
wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
ph_maker = await wallet_maker.get_new_puzzlehash()
ph_taker = await wallet_taker.get_new_puzzlehash()
ph_token = bytes32(token_bytes())
if trusted:
wallet_node_maker.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
wallet_node_taker.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
else:
wallet_node_maker.config["trusted_peers"] = {}
wallet_node_taker.config["trusted_peers"] = {}
await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
funds = calculate_pool_reward(uint32(1)) + calculate_base_farmer_reward(uint32(1))
await time_out_assert(30, wallet_maker.get_unconfirmed_balance, funds)
await time_out_assert(30, wallet_maker.get_confirmed_balance, funds)
await time_out_assert(30, wallet_taker.get_unconfirmed_balance, funds)
await time_out_assert(30, wallet_taker.get_confirmed_balance, funds)
api_maker = WalletRpcApi(wallet_node_maker)
api_taker = WalletRpcApi(wallet_node_taker)
config = bt.config
daemon_port = config["daemon_port"]
def stop_node_cb() -> None:
pass
full_node_rpc_api = FullNodeRpcApi(full_node_api.full_node)
rpc_server_node = await start_rpc_server(
full_node_rpc_api,
self_hostname,
daemon_port,
uint16(0),
stop_node_cb,
bt.root_path,
config,
connect_to_daemon=False,
)
rpc_server = await start_rpc_server(
api_maker,
self_hostname,
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/test_ownership_outer_puzzle.py | tests/wallet/nft_wallet/test_ownership_outer_puzzle.py | from __future__ import annotations
from typing import Optional
from clvm_tools.binutils import assemble
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.util.ints import uint16
from flax.wallet.nft_wallet.ownership_outer_puzzle import puzzle_for_ownership_layer
from flax.wallet.nft_wallet.transfer_program_puzzle import puzzle_for_transfer_program
from flax.wallet.outer_puzzles import construct_puzzle, get_inner_puzzle, get_inner_solution, match_puzzle, solve_puzzle
from flax.wallet.puzzle_drivers import PuzzleInfo, Solver
from flax.wallet.uncurried_puzzle import uncurry_puzzle
def test_ownership_outer_puzzle() -> None:
ACS = Program.to(1)
NIL = Program.to([])
owner = bytes32([0] * 32)
# (mod (current_owner conditions solution)
# (list current_owner () conditions)
# )
transfer_program = assemble( # type: ignore
"""
(c 2 (c () (c 5 ())))
"""
)
transfer_program_default: Program = puzzle_for_transfer_program(bytes32([1] * 32), bytes32([2] * 32), uint16(5000))
ownership_puzzle: Program = puzzle_for_ownership_layer(owner, transfer_program, ACS)
ownership_puzzle_empty: Program = puzzle_for_ownership_layer(NIL, transfer_program, ACS)
ownership_puzzle_default: Program = puzzle_for_ownership_layer(owner, transfer_program_default, ACS)
ownership_driver: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(ownership_puzzle))
ownership_driver_empty: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(ownership_puzzle_empty))
ownership_driver_default: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(ownership_puzzle_default))
transfer_program_driver: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(transfer_program_default))
assert ownership_driver is not None
assert ownership_driver_empty is not None
assert ownership_driver_default is not None
assert transfer_program_driver is not None
assert ownership_driver.type() == "ownership"
assert ownership_driver["owner"] == owner
assert ownership_driver_empty["owner"] == NIL
assert ownership_driver["transfer_program"] == transfer_program
assert ownership_driver_default["transfer_program"] == transfer_program_driver
assert transfer_program_driver.type() == "royalty transfer program"
assert transfer_program_driver["launcher_id"] == bytes32([1] * 32)
assert transfer_program_driver["royalty_address"] == bytes32([2] * 32)
assert transfer_program_driver["royalty_percentage"] == 5000
assert construct_puzzle(ownership_driver, ACS) == ownership_puzzle
assert construct_puzzle(ownership_driver_empty, ACS) == ownership_puzzle_empty
assert construct_puzzle(ownership_driver_default, ACS) == ownership_puzzle_default
assert get_inner_puzzle(ownership_driver, uncurry_puzzle(ownership_puzzle)) == ACS
# Set up for solve
inner_solution = Program.to(
[
[51, ACS.get_tree_hash(), 1],
[-10],
]
)
solution: Program = solve_puzzle(
ownership_driver,
Solver({}),
ACS,
inner_solution,
)
ownership_puzzle.run(solution)
assert get_inner_solution(ownership_driver, solution) == inner_solution
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/test_nft_1_offers.py | tests/wallet/nft_wallet/test_nft_1_offers.py | from __future__ import annotations
import asyncio
import logging
from secrets import token_bytes
from typing import Any, Callable, Coroutine, Optional
import pytest
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.full_node.mempool_manager import MempoolManager
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.util.ints import uint16, uint32, uint64, uint128
from flax.wallet.cat_wallet.cat_wallet import CATWallet
from flax.wallet.did_wallet.did_wallet import DIDWallet
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.outer_puzzles import create_asset_id, match_puzzle
from flax.wallet.puzzle_drivers import PuzzleInfo
from flax.wallet.trading.offer import Offer
from flax.wallet.trading.trade_status import TradeStatus
from flax.wallet.uncurried_puzzle import uncurry_puzzle
from flax.wallet.util.compute_memos import compute_memos
# from clvm_tools.binutils import disassemble
from tests.util.wallet_is_synced import wallets_are_synced
logging.getLogger("aiosqlite").setLevel(logging.INFO) # Too much logging on debug level
async def tx_in_pool(mempool: MempoolManager, tx_id: bytes32) -> bool:
tx = mempool.get_spendbundle(tx_id)
if tx is None:
return False
return True
def mempool_not_empty(fnapi: FullNodeSimulator) -> bool:
return len(fnapi.full_node.mempool_manager.mempool.spends) > 0
async def farm_blocks_until(
predicate_f: Callable[[], Coroutine[Any, Any, bool]], fnapi: FullNodeSimulator, ph: bytes32
) -> None:
for i in range(50):
await fnapi.farm_new_transaction_block(FarmNewBlockProtocol(ph))
if await predicate_f():
return None
await asyncio.sleep(0.3)
raise TimeoutError()
async def get_nft_count(wallet: NFTWallet) -> int:
return len(await wallet.get_current_nfts())
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
# @pytest.mark.skip
async def test_nft_offer_sell_nft(two_wallet_nodes: Any, trusted: Any) -> None:
    """Maker (with a DID) mints an NFT and offers it for XFX; taker accepts.

    End-to-end flow: fund both wallets, create the maker's DID wallet, mint a
    DID-backed NFT carrying a 200 bps royalty, create an offer of the NFT for
    1000 XFX, let the taker respond, then verify the NFT moved to the taker
    and both confirmed balances account for fees and the royalty.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_maker, server_0 = wallets[0]
    wallet_node_taker, server_1 = wallets[1]
    wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Random puzzle hash used as a farming target so later blocks pay neither wallet.
    ph_token = bytes32(token_bytes())
    # Exercise both trusted-peer and untrusted wallet sync paths.
    if trusted:
        wallet_node_maker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_taker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_maker.config["trusted_peers"] = {}
        wallet_node_taker.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # One farmed block each funds the maker and taker wallets.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    # Expected starting balance: one block's pool + farmer reward (height 1 only).
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, funds)
    # Maker needs a DID so the minted NFT can carry a DID owner.
    did_wallet_maker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, uint64(1)
    )
    spend_bundle_list = await wallet_node_maker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_maker.id()
    )
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, wallet_maker.get_pending_change_balance, 0)
    # DID creation moved 1 mojo out of the main wallet.
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds - 1)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds - 1)
    hex_did_id = did_wallet_maker.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    target_puzhash = ph_maker
    royalty_puzhash = ph_maker
    royalty_basis_pts = uint16(200)  # 200 bps == 2% royalty
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, name="NFT WALLET DID 1", did_id=did_id
    )
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    sb = await nft_wallet_maker.generate_new_nft(
        metadata,
        target_puzhash,
        royalty_puzhash,
        royalty_basis_pts,
        did_id,
    )
    assert sb
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, get_nft_count, 1, nft_wallet_maker)
    # TAKER SETUP - NO DID
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_taker.wallet_state_manager, wallet_taker, name="NFT WALLET TAKER"
    )
    # maker create offer: NFT for xfx
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    nft_to_offer = coins_maker[0]
    nft_to_offer_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_to_offer_asset_id: bytes32 = create_asset_id(nft_to_offer_info)  # type: ignore
    xfx_requested = 1000
    maker_fee = uint64(433)
    # Offer dict: give away the NFT (-1), request xfx into the main wallet.
    offer_did_nft_for_xfx = {nft_to_offer_asset_id: -1, wallet_maker.id(): xfx_requested}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_did_nft_for_xfx, {}, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = 1
    # Creating an offer must not have submitted anything to the mempool yet.
    assert not mempool_not_empty(full_node_api)
    peer = wallet_node_taker.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert error is None
    assert success is True
    assert trade_take is not None
    # Settled when the NFT has left the maker's wallet and arrived in the taker's.
    async def maker_0_taker_1() -> bool:
        return (
            len(await nft_wallet_maker.get_current_nfts()) == 0 and len(await nft_wallet_taker.get_current_nfts()) == 1
        )
    await farm_blocks_until(maker_0_taker_1, full_node_api, ph_token)
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, get_nft_count, 0, nft_wallet_maker)
    await time_out_assert(20, get_nft_count, 1, nft_wallet_taker)
    # assert payments and royalties
    expected_royalty = uint64(xfx_requested * royalty_basis_pts / 10000)
    # Maker: minus 2 mojos (presumably the DID + NFT mint — confirm against wallet code),
    # minus the fee, plus the requested xfx and the royalty (royalty ph is the maker's).
    expected_maker_balance = funds - 2 - maker_fee + xfx_requested + expected_royalty
    expected_taker_balance = funds - taker_fee - xfx_requested - expected_royalty
    await time_out_assert(20, wallet_maker.get_confirmed_balance, expected_maker_balance)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, expected_taker_balance)
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
# @pytest.mark.skip
async def test_nft_offer_request_nft(two_wallet_nodes: Any, trusted: Any) -> None:
    """Mirror of the sell test: the TAKER owns the NFT and the MAKER requests it.

    The taker (with a DID) mints an NFT; the maker builds an offer requesting
    that NFT in exchange for 1000 XFX. After the taker responds, the NFT must
    end up with the maker and balances must reflect fees plus the 2% royalty
    paid to the taker's royalty puzzle hash.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_maker, server_0 = wallets[0]
    wallet_node_taker, server_1 = wallets[1]
    wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Neutral farming target so later blocks pay neither wallet.
    ph_token = bytes32(token_bytes())
    # Exercise both trusted-peer and untrusted wallet sync paths.
    if trusted:
        wallet_node_maker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_taker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_maker.config["trusted_peers"] = {}
        wallet_node_taker.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    # One block reward each side (height 1 only).
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, funds)
    # This time the TAKER gets the DID (it will mint and own the NFT).
    did_wallet_taker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_taker.wallet_state_manager, wallet_taker, uint64(1)
    )
    spend_bundle_list = await wallet_node_taker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_taker.id()
    )
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, wallet_taker.get_pending_change_balance, 0)
    hex_did_id = did_wallet_taker.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    target_puzhash = ph_taker
    royalty_puzhash = ph_taker
    royalty_basis_pts = uint16(200)  # 200 bps == 2% royalty
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_taker.wallet_state_manager, wallet_taker, name="NFT WALLET DID TAKER", did_id=did_id
    )
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    # DID creation cost the taker 1 mojo.
    await time_out_assert(20, wallet_taker.get_unconfirmed_balance, funds - 1)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, funds - 1)
    sb = await nft_wallet_taker.generate_new_nft(
        metadata,
        target_puzhash,
        royalty_puzhash,
        royalty_basis_pts,
        did_id,
    )
    assert sb
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, get_nft_count, 1, nft_wallet_taker)
    # MAKER SETUP - NO DID
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, name="NFT WALLET MAKER"
    )
    # maker create offer: NFT for xfx
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 0
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 1
    nft_to_request = coins_taker[0]
    nft_to_request_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_request.full_puzzle))
    assert isinstance(nft_to_request_info, PuzzleInfo)
    nft_to_request_asset_id = create_asset_id(nft_to_request_info)
    xfx_offered = 1000
    maker_fee = 10
    # Requesting an asset the maker doesn't own requires supplying its driver.
    driver_dict = {nft_to_request_asset_id: nft_to_request_info}
    # Offer dict: request the NFT (+1), give away xfx from the main wallet.
    offer_dict = {nft_to_request_asset_id: 1, wallet_maker.id(): -xfx_offered}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(offer_dict, driver_dict, fee=maker_fee)
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = 1
    peer = wallet_node_taker.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert error is None
    assert success is True
    assert trade_take is not None
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    # Settled when the NFT has moved from the taker's wallet into the maker's.
    async def maker_1_taker_0() -> bool:
        return (
            len(await nft_wallet_maker.get_current_nfts()) == 1 and len(await nft_wallet_taker.get_current_nfts()) == 0
        )
    await farm_blocks_until(maker_1_taker_0, full_node_api, ph_token)
    # assert payments and royalties
    expected_royalty = uint64(xfx_offered * royalty_basis_pts / 10000)
    expected_maker_balance = funds - maker_fee - xfx_offered - expected_royalty
    # Taker: minus 2 mojos (presumably DID + NFT mint — confirm), minus the fee,
    # plus the offered xfx and the royalty (royalty ph is the taker's).
    expected_taker_balance = funds - 2 - taker_fee + xfx_offered + expected_royalty
    await time_out_assert(20, wallet_maker.get_confirmed_balance, expected_maker_balance)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, expected_taker_balance)
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
# @pytest.mark.skip
async def test_nft_offer_sell_did_to_did(two_wallet_nodes: Any, trusted: Any) -> None:
    """Sell a DID-backed NFT to a taker who ALSO has a DID.

    Both sides create DID wallets. The maker mints and offers the NFT for
    1000 XFX; after the taker accepts, the NFT should land in an NFT wallet
    auto-created on the taker side (wallet id 4 in this scenario), and
    balances must account for fees and the 2% royalty.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_maker, server_0 = wallets[0]
    wallet_node_taker, server_1 = wallets[1]
    wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Neutral farming target so later blocks pay neither wallet.
    ph_token = bytes32(token_bytes())
    # Exercise both trusted-peer and untrusted wallet sync paths.
    if trusted:
        wallet_node_maker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_taker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_maker.config["trusted_peers"] = {}
        wallet_node_taker.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    # One block reward each side (height 1 only).
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, funds)
    # MAKER SETUP - create a DID to back the minted NFT.
    did_wallet_maker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, uint64(1)
    )
    spend_bundle_list = await wallet_node_maker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_maker.id()
    )
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, wallet_maker.get_pending_change_balance, 0)
    hex_did_id = did_wallet_maker.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    target_puzhash = ph_maker
    royalty_puzhash = ph_maker
    royalty_basis_pts = uint16(200)  # 200 bps == 2% royalty
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, name="NFT WALLET DID 1", did_id=did_id
    )
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    # DID creation cost the maker 1 mojo.
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds - 1)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds - 1)
    sb = await nft_wallet_maker.generate_new_nft(
        metadata,
        target_puzhash,
        royalty_puzhash,
        royalty_basis_pts,
        did_id,
    )
    assert sb
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    # NOTE(review): farms to the zero puzzle hash here instead of ph_token — confirm intentional.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32)))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, get_nft_count, 1, nft_wallet_maker)
    # TAKER SETUP - WITH DID
    did_wallet_taker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_taker.wallet_state_manager, wallet_taker, uint64(1)
    )
    spend_bundle_list_taker = await wallet_node_taker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_taker.id()
    )
    spend_bundle_taker = spend_bundle_list_taker[0].spend_bundle
    await time_out_assert_not_none(
        5, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle_taker.name()
    )
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, wallet_taker.get_pending_change_balance, 0)
    hex_did_id_taker = did_wallet_taker.get_my_DID()
    did_id_taker = bytes32.fromhex(hex_did_id_taker)
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_taker.wallet_state_manager, wallet_taker, name="NFT WALLET TAKER", did_id=did_id_taker
    )
    # maker create offer: NFT for xfx
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    nft_to_offer = coins_maker[0]
    nft_to_offer_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_to_offer_asset_id: bytes32 = create_asset_id(nft_to_offer_info)  # type: ignore
    xfx_requested = 1000
    maker_fee = uint64(433)
    # Offer dict: give away the NFT (-1), request xfx into the main wallet.
    offer_did_nft_for_xfx = {nft_to_offer_asset_id: -1, wallet_maker.id(): xfx_requested}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_did_nft_for_xfx, {}, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = 1
    peer = wallet_node_taker.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert error is None
    assert success is True
    assert trade_take is not None
    # Settled when the NFT left the maker and appeared in the taker's
    # auto-created NFT wallet (expected to be wallet id 4 on the taker side).
    async def maker_0_taker_1() -> bool:
        return (
            len(await nft_wallet_maker.get_current_nfts()) == 0
            and len(wallet_taker.wallet_state_manager.wallets) == 4
            and len(await wallet_taker.wallet_state_manager.wallets[4].get_current_nfts()) == 1
        )
    await farm_blocks_until(maker_0_taker_1, full_node_api, ph_token)
    await time_out_assert(20, get_nft_count, 0, nft_wallet_maker)
    # assert new nft wallet is created for taker
    await time_out_assert(20, len, 4, wallet_taker.wallet_state_manager.wallets)
    await time_out_assert(20, get_nft_count, 1, wallet_taker.wallet_state_manager.wallets[4])
    assert (await wallet_taker.wallet_state_manager.wallets[4].get_current_nfts())[0].nft_id == nft_to_offer_asset_id
    # assert payments and royalties
    expected_royalty = uint64(xfx_requested * royalty_basis_pts / 10000)
    # Maker: minus 2 mojos (presumably DID + NFT mint — confirm), minus fee,
    # plus the requested xfx and the royalty; taker additionally paid 1 mojo for its DID.
    expected_maker_balance = funds - 2 - maker_fee + xfx_requested + expected_royalty
    expected_taker_balance = funds - 1 - taker_fee - xfx_requested - expected_royalty
    await time_out_assert(20, wallet_maker.get_confirmed_balance, expected_maker_balance)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, expected_taker_balance)
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
# @pytest.mark.skip
async def test_nft_offer_sell_nft_for_cat(two_wallet_nodes: Any, trusted: Any) -> None:
    """Sell a DID-backed NFT priced in a CAT instead of XFX.

    The maker mints a CAT, trades part of it to the taker (in two coins so the
    taker must aggregate), then offers the NFT for 1000 CAT. After the taker
    accepts, the NFT moves to the taker and both XFX and CAT balances must
    reflect fees, the minted amount, and the 2% royalty paid in CAT.
    """
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_maker, server_0 = wallets[0]
    wallet_node_taker, server_1 = wallets[1]
    wallet_maker = wallet_node_maker.wallet_state_manager.main_wallet
    wallet_taker = wallet_node_taker.wallet_state_manager.main_wallet
    ph_maker = await wallet_maker.get_new_puzzlehash()
    ph_taker = await wallet_taker.get_new_puzzlehash()
    # Neutral farming target so later blocks pay neither wallet.
    ph_token = bytes32(token_bytes())
    # Exercise both trusted-peer and untrusted wallet sync paths.
    if trusted:
        wallet_node_maker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_taker.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_maker.config["trusted_peers"] = {}
        wallet_node_taker.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_maker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_taker))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    # One block reward each side (height 1 only).
    funds = sum([calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, 2)])
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_unconfirmed_balance, funds)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, funds)
    # Maker needs a DID so the minted NFT can carry a DID owner.
    did_wallet_maker: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, uint64(1)
    )
    spend_bundle_list = await wallet_node_maker.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
        did_wallet_maker.id()
    )
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, wallet_maker.get_pending_change_balance, 0)
    # DID creation cost the maker 1 mojo.
    await time_out_assert(20, wallet_maker.get_unconfirmed_balance, funds - 1)
    await time_out_assert(20, wallet_maker.get_confirmed_balance, funds - 1)
    hex_did_id = did_wallet_maker.get_my_DID()
    did_id = bytes32.fromhex(hex_did_id)
    target_puzhash = ph_maker
    royalty_puzhash = ph_maker
    royalty_basis_pts = uint16(200)  # 200 bps == 2% royalty
    nft_wallet_maker = await NFTWallet.create_new_nft_wallet(
        wallet_node_maker.wallet_state_manager, wallet_maker, name="NFT WALLET DID 1", did_id=did_id
    )
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    sb = await nft_wallet_maker.generate_new_nft(
        metadata,
        target_puzhash,
        royalty_puzhash,
        royalty_basis_pts,
        did_id,
    )
    assert sb
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(20, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, get_nft_count, 1, nft_wallet_maker)
    # TAKER SETUP - NO DID
    nft_wallet_taker = await NFTWallet.create_new_nft_wallet(
        wallet_node_taker.wallet_state_manager, wallet_taker, name="NFT WALLET TAKER"
    )
    # maker create offer: NFT for xfx
    trade_manager_maker = wallet_maker.wallet_state_manager.trade_manager
    trade_manager_taker = wallet_taker.wallet_state_manager.trade_manager
    coins_maker = await nft_wallet_maker.get_current_nfts()
    assert len(coins_maker) == 1
    coins_taker = await nft_wallet_taker.get_current_nfts()
    assert len(coins_taker) == 0
    # Create new CAT and wallets for maker and taker
    # Trade them between maker and taker to ensure multiple coins for each cat
    cats_to_mint = 100000
    cats_to_trade = uint64(10000)
    # Hold the state-manager lock while minting so wallet creation is atomic.
    async with wallet_node_maker.wallet_state_manager.lock:
        full_node_api.full_node.log.warning(
            f"Mempool size: {len(full_node_api.full_node.mempool_manager.mempool.spends)}"
        )
        cat_wallet_maker: CATWallet = await CATWallet.create_new_cat_wallet(
            wallet_node_maker.wallet_state_manager, wallet_maker, {"identifier": "genesis_by_id"}, uint64(cats_to_mint)
        )
        await time_out_assert(20, mempool_not_empty, True, full_node_api)
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    await time_out_assert(20, cat_wallet_maker.get_confirmed_balance, cats_to_mint)
    await time_out_assert(20, cat_wallet_maker.get_unconfirmed_balance, cats_to_mint)
    cat_wallet_taker: CATWallet = await CATWallet.create_wallet_for_cat(
        wallet_node_taker.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id()
    )
    # Send the taker's CATs to two separate puzzle hashes so the taker
    # ends up holding multiple CAT coins.
    ph_taker_cat_1 = await wallet_taker.get_new_puzzlehash()
    ph_taker_cat_2 = await wallet_taker.get_new_puzzlehash()
    cat_tx_records = await cat_wallet_maker.generate_signed_transaction(
        [cats_to_trade, cats_to_trade], [ph_taker_cat_1, ph_taker_cat_2], memos=[[ph_taker_cat_1], [ph_taker_cat_2]]
    )
    for tx_record in cat_tx_records:
        await wallet_maker.wallet_state_manager.add_pending_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()  # type: ignore
        )
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph_token))
    await time_out_assert(20, wallets_are_synced, True, [wallet_node_maker, wallet_node_taker], full_node_api)
    maker_cat_balance = cats_to_mint - (2 * cats_to_trade)
    taker_cat_balance = 2 * cats_to_trade
    await time_out_assert(20, cat_wallet_maker.get_confirmed_balance, maker_cat_balance)
    await time_out_assert(20, cat_wallet_taker.get_confirmed_balance, taker_cat_balance)
    nft_to_offer = coins_maker[0]
    nft_to_offer_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle))
    nft_to_offer_asset_id: bytes32 = create_asset_id(nft_to_offer_info)  # type: ignore
    cats_requested = 1000
    maker_fee = uint64(433)
    # Offer dict: give away the NFT (-1), request CATs into the maker's CAT wallet.
    offer_did_nft_for_xfx = {nft_to_offer_asset_id: -1, cat_wallet_maker.id(): cats_requested}
    success, trade_make, error = await trade_manager_maker.create_offer_for_ids(
        offer_did_nft_for_xfx, {}, fee=maker_fee
    )
    assert success is True
    assert error is None
    assert trade_make is not None
    taker_fee = 1
    peer = wallet_node_taker.get_full_node_peer()
    assert peer is not None
    success, trade_take, error = await trade_manager_taker.respond_to_offer(
        Offer.from_bytes(trade_make.offer), peer, fee=uint64(taker_fee)
    )
    await time_out_assert(20, mempool_not_empty, True, full_node_api)
    assert error is None
    assert success is True
    assert trade_take is not None
    # Settled when the NFT has left the maker's wallet and arrived in the taker's.
    async def maker_0_taker_1() -> bool:
        return (
            len(await nft_wallet_maker.get_current_nfts()) == 0 and len(await nft_wallet_taker.get_current_nfts()) == 1
        )
    await farm_blocks_until(maker_0_taker_1, full_node_api, ph_token)
    # assert payments and royalties
    expected_royalty = uint64(cats_requested * royalty_basis_pts / 10000)
    # XFX side: maker paid 2 mojos (presumably DID + NFT mint — confirm), the
    # minted CAT amount, and the fee; taker only paid its fee. Payment + royalty
    # flow on the CAT side below.
    expected_maker_balance = funds - 2 - cats_to_mint - maker_fee
    expected_taker_balance = funds - taker_fee
    expected_maker_cat_balance = maker_cat_balance + cats_requested + expected_royalty
    expected_taker_cat_balance = taker_cat_balance - cats_requested - expected_royalty
    await time_out_assert(20, wallet_maker.get_confirmed_balance, expected_maker_balance)
    await time_out_assert(20, wallet_taker.get_confirmed_balance, expected_taker_balance)
    await time_out_assert(20, cat_wallet_maker.get_confirmed_balance, expected_maker_cat_balance)
    await time_out_assert(20, cat_wallet_taker.get_confirmed_balance, expected_taker_cat_balance)
@pytest.mark.parametrize("trusted", [True, False])
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/config.py | tests/wallet/nft_wallet/config.py | from __future__ import annotations
# CI configuration for this test module.
# Maximum wall-clock time allotted to the job (units not shown here — TODO confirm).
job_timeout = 45
# Whether the CI run should check out the cached test blocks/plots fixture — presumably; verify against CI scripts.
checkout_blocks_and_plots = True
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/test_nft_wallet.py | tests/wallet/nft_wallet/test_nft_wallet.py | from __future__ import annotations
import asyncio
import time
from typing import Any, Awaitable, Callable, Dict, List
import pytest
from blspy import AugSchemeMPL, G1Element, G2Element
from clvm_tools.binutils import disassemble
from flax.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from flax.full_node.mempool_manager import MempoolManager
from flax.rpc.wallet_rpc_api import WalletRpcApi
from flax.simulator.full_node_simulator import FullNodeSimulator
from flax.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
from flax.simulator.time_out_assert import time_out_assert, time_out_assert_not_none
from flax.types.blockchain_format.program import Program
from flax.types.blockchain_format.sized_bytes import bytes32
from flax.types.peer_info import PeerInfo
from flax.types.spend_bundle import SpendBundle
from flax.util.bech32m import encode_puzzle_hash
from flax.util.byte_types import hexstr_to_bytes
from flax.util.ints import uint16, uint32, uint64
from flax.wallet.did_wallet.did_wallet import DIDWallet
from flax.wallet.nft_wallet.nft_wallet import NFTWallet
from flax.wallet.util.address_type import AddressType
from flax.wallet.util.compute_memos import compute_memos
from flax.wallet.util.wallet_types import WalletType
from flax.wallet.wallet_state_manager import WalletStateManager
from tests.util.wallet_is_synced import wallet_is_synced
async def tx_in_pool(mempool: MempoolManager, tx_id: bytes32) -> bool:
    """Return True if the spend bundle named *tx_id* is currently in *mempool*.

    Args:
        mempool: the full node's mempool manager to query.
        tx_id: name (hash) of the spend bundle to look for.
    """
    # get_spendbundle returns None when the bundle is absent.
    return mempool.get_spendbundle(tx_id) is not None
async def get_nft_count(wallet: NFTWallet) -> int:
    """Return the NFT count as reported by the wallet itself."""
    count = await wallet.get_nft_count()
    return count
async def get_wallet_number(manager: WalletStateManager) -> int:
    """Return how many wallets are registered with *manager*."""
    registered = manager.wallets
    return len(registered)
async def wait_rpc_state_condition(
    timeout: int,
    async_function: Callable[[Dict[str, Any]], Awaitable[Dict]],
    params: List[Dict],
    condition_func: Callable[[Dict[str, Any]], bool],
) -> Dict:
    """Poll an RPC coroutine until *condition_func* accepts its response.

    Invokes ``async_function(*params)`` every half second and returns the
    first response dict that satisfies *condition_func*. Raises
    ``asyncio.TimeoutError`` once *timeout* seconds have elapsed.
    """
    __tracebackhide__ = True  # keep pytest tracebacks pointed at the caller
    start = time.monotonic()
    while True:
        result = await async_function(*params)
        assert isinstance(result, dict)
        if condition_func(result):
            return result
        elapsed = time.monotonic() - start
        if elapsed >= timeout:
            raise asyncio.TimeoutError(
                f"timed out while waiting for {async_function.__name__}(): {elapsed} >= {timeout}",
            )
        await asyncio.sleep(0.5)
async def make_new_block_with(resp: Dict, full_node_api: FullNodeSimulator, ph: bytes32) -> SpendBundle:
    """Extract the spend bundle from an RPC response, wait for it to reach the
    mempool, then farm a transaction block to *ph* and return the bundle."""
    assert resp.get("success")
    bundle = resp["spend_bundle"]
    assert isinstance(bundle, SpendBundle)
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    return bundle
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_wallet_creation_automatically(two_wallet_nodes: Any, trusted: Any) -> None:
    """Mint an NFT in wallet node 0, transfer it to wallet node 1, and verify the
    receiving node automatically creates an NFT wallet to track the coin.

    Parametrized to run with the full node both trusted and untrusted.
    """
    num_blocks = 3
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
    ph = await wallet_0.get_new_puzzlehash()
    ph1 = await wallet_1.get_new_puzzlehash()
    # Configure peer trust per the parametrized mode before connecting.
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Farm block rewards to wallet 0 and wait for balances to reflect them.
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks - 1)]
    )
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    nft_wallet_0 = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, name="NFT WALLET 1"
    )
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    # Mint the NFT and wait for it to be confirmed into wallet 0.
    sb = await nft_wallet_0.generate_new_nft(metadata)
    assert sb
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    await time_out_assert(30, get_nft_count, 1, nft_wallet_0)
    coins = await nft_wallet_0.get_current_nfts()
    assert len(coins) == 1, "nft not generated"
    # Transfer the freshly minted NFT to wallet 1's puzzle hash.
    txs = await nft_wallet_0.generate_signed_transaction([uint64(coins[0].coin.amount)], [ph1], coins={coins[0].coin})
    assert len(txs) == 1
    assert txs[0].spend_bundle is not None
    await wallet_node_0.wallet_state_manager.add_pending_transaction(txs[0])
    await time_out_assert_not_none(
        30, full_node_api.full_node.mempool_manager.get_spendbundle, txs[0].spend_bundle.name()
    )
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    async def num_wallets() -> int:
        # Wallet count on the receiving node; reaches 2 once the NFT wallet
        # is auto-created alongside the main wallet.
        return len(await wallet_node_1.wallet_state_manager.get_all_wallet_info_entries())
    await time_out_assert(30, num_wallets, 2)
    # Get the new NFT wallet
    nft_wallets = await wallet_node_1.wallet_state_manager.get_all_wallet_info_entries(WalletType.NFT)
    assert len(nft_wallets) == 1
    nft_wallet_1: NFTWallet = wallet_node_1.wallet_state_manager.wallets[nft_wallets[0].id]
    # The NFT should have moved from wallet 0 to the auto-created wallet 1.
    await time_out_assert(30, get_nft_count, 0, nft_wallet_0)
    await time_out_assert(30, get_nft_count, 1, nft_wallet_1)
    coins = await nft_wallet_0.get_current_nfts()
    assert len(coins) == 0
    coins = await nft_wallet_1.get_current_nfts()
    assert len(coins) == 1
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_wallet_creation_and_transfer(two_wallet_nodes: Any, trusted: Any) -> None:
    """Exercise NFT mint, reorg of the mint, transfer to a second wallet,
    transfer back, and a reorg of the final transfer.

    Parametrized to run with the full node both trusted and untrusted.
    """
    num_blocks = 2
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
    ph = await wallet_0.get_new_puzzlehash()
    ph1 = await wallet_1.get_new_puzzlehash()
    # Configure peer trust per the parametrized mode before connecting.
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Farm block rewards to wallet 0 and wait for balances to reflect them.
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks - 1)]
    )
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    nft_wallet_0 = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, name="NFT WALLET 1"
    )
    metadata = Program.to(
        [
            ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F185"),
        ]
    )
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, 2000000000000)
    await time_out_assert(30, wallet_0.get_confirmed_balance, 2000000000000)
    sb = await nft_wallet_0.generate_new_nft(metadata)
    assert sb
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await time_out_assert(10, get_nft_count, 1, nft_wallet_0)
    # Minting costs 1 mojo on top of the farmed rewards.
    await time_out_assert(10, wallet_0.get_unconfirmed_balance, 4000000000000 - 1)
    await time_out_assert(10, wallet_0.get_confirmed_balance, 4000000000000 - 1)
    # Test Reorg mint
    # Reorg past the mint block: the NFT (and its wallet) should disappear.
    height = full_node_api.full_node.blockchain.get_peak_height()
    if height is None:
        assert False
    await full_node_api.reorg_from_index_to_new_index(ReorgProtocol(uint32(height - 1), uint32(height + 1), ph1, None))
    await time_out_assert(30, get_nft_count, 0, nft_wallet_0)
    await time_out_assert(30, get_wallet_number, 1, wallet_node_0.wallet_state_manager)
    # Recreate the NFT wallet and mint again after the reorg.
    nft_wallet_0 = await NFTWallet.create_new_nft_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, name="NFT WALLET 1"
    )
    metadata = Program.to(
        [
            ("u", ["https://www.test.net/logo.svg"]),
            ("h", "0xD4584AD463139FA8C0D9F68F4B59F181"),
        ]
    )
    await time_out_assert(10, wallet_0.get_unconfirmed_balance, 4000000000000 - 1)
    await time_out_assert(10, wallet_0.get_confirmed_balance, 4000000000000)
    sb = await nft_wallet_0.generate_new_nft(metadata)
    assert sb
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(10, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await time_out_assert(30, wallet_node_0.wallet_state_manager.lock.locked, False)
    for i in range(1, num_blocks * 2):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    # Both the re-minted NFT and the one restored by the reorg are present.
    await time_out_assert(30, get_nft_count, 2, nft_wallet_0)
    coins = await nft_wallet_0.get_current_nfts()
    assert len(coins) == 2, "nft not generated"
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    # Transfer one NFT to a second wallet on the other node.
    nft_wallet_1 = await NFTWallet.create_new_nft_wallet(
        wallet_node_1.wallet_state_manager, wallet_1, name="NFT WALLET 2"
    )
    txs = await nft_wallet_0.generate_signed_transaction([uint64(coins[1].coin.amount)], [ph1], coins={coins[1].coin})
    assert len(txs) == 1
    assert txs[0].spend_bundle is not None
    await wallet_node_0.wallet_state_manager.add_pending_transaction(txs[0])
    await time_out_assert_not_none(
        30, full_node_api.full_node.mempool_manager.get_spendbundle, txs[0].spend_bundle.name()
    )
    assert compute_memos(txs[0].spend_bundle)
    for i in range(1, num_blocks * 2):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    await time_out_assert(30, get_nft_count, 1, nft_wallet_0)
    await time_out_assert(30, get_nft_count, 1, nft_wallet_1)
    coins = await nft_wallet_1.get_current_nfts()
    assert len(coins) == 1
    await time_out_assert(30, wallet_1.get_pending_change_balance, 0)
    # Send it back to original owner
    txs = await nft_wallet_1.generate_signed_transaction([uint64(coins[0].coin.amount)], [ph], coins={coins[0].coin})
    assert len(txs) == 1
    assert txs[0].spend_bundle is not None
    await wallet_node_1.wallet_state_manager.add_pending_transaction(txs[0])
    await time_out_assert_not_none(
        30, full_node_api.full_node.mempool_manager.get_spendbundle, txs[0].spend_bundle.name()
    )
    assert compute_memos(txs[0].spend_bundle)
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph1))
    await time_out_assert(30, wallet_node_0.wallet_state_manager.lock.locked, False)
    await time_out_assert(30, get_nft_count, 2, nft_wallet_0)
    await time_out_assert(30, get_nft_count, 0, nft_wallet_1)
    # Test Reorg
    # Reorg past the send-back: ownership should revert to the pre-send state.
    height = full_node_api.full_node.blockchain.get_peak_height()
    if height is None:
        assert False
    await full_node_api.reorg_from_index_to_new_index(ReorgProtocol(uint32(height - 1), uint32(height + 2), ph1, None))
    await time_out_assert(30, get_nft_count, 1, nft_wallet_0)
    await time_out_assert(30, get_nft_count, 1, nft_wallet_1)
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_wallet_rpc_creation_and_list(two_wallet_nodes: Any, trusted: Any) -> None:
    """Create an NFT wallet and mint two NFTs via the wallet RPC API, then
    verify ``nft_get_nfts`` listing, including pagination via
    ``start_index``/``num``.

    Parametrized to run with the full node both trusted and untrusted.
    """
    num_blocks = 3
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
    ph = await wallet_0.get_new_puzzlehash()
    _ = await wallet_1.get_new_puzzlehash()
    # Configure peer trust per the parametrized mode before connecting.
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    # Farm block rewards to wallet 0 and wait for balances and sync.
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks - 1)]
    )
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    await time_out_assert(30, wallet_node_0.wallet_state_manager.synced, True)
    api_0 = WalletRpcApi(wallet_node_0)
    await time_out_assert(30, wallet_is_synced, True, wallet_node_0, full_node_api)
    # Create the NFT wallet through the RPC layer.
    nft_wallet_0 = await api_0.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 1"))
    assert isinstance(nft_wallet_0, dict)
    assert nft_wallet_0.get("success")
    nft_wallet_0_id = nft_wallet_0["wallet_id"]
    # Mint the first NFT (data URI + hash only).
    tr1 = await api_0.nft_mint_nft(
        {
            "wallet_id": nft_wallet_0_id,
            "artist_address": ph,
            "hash": "0xD4584AD463139FA8C0D9F68F4B59F185",
            "uris": ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"],
        }
    )
    assert isinstance(tr1, dict)
    assert tr1.get("success")
    sb = tr1["spend_bundle"]
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await wait_rpc_state_condition(30, api_0.nft_get_nfts, [dict(wallet_id=nft_wallet_0_id)], lambda x: x["nft_list"])
    # Mint a second NFT, this time with metadata URI/hash as well.
    tr2 = await api_0.nft_mint_nft(
        {
            "wallet_id": nft_wallet_0_id,
            "artist_address": ph,
            "hash": "0xD4584AD463139FA8C0D9F68F4B59F184",
            "uris": ["https://flaxlisp.com/img/logo.svg"],
            "meta_uris": [
                "https://bafybeigzcazxeu7epmm4vtkuadrvysv74lbzzbl2evphtae6k57yhgynp4.ipfs.nftstorage.link/6590.json"
            ],
            "meta_hash": "0x6a9cb99b7b9a987309e8dd4fd14a7ca2423858585da68cc9ec689669dd6dd6ab",
        }
    )
    assert isinstance(tr2, dict)
    assert tr2.get("success")
    sb = tr2["spend_bundle"]
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    # Both NFTs should now be listed by the RPC.
    coins_response = await wait_rpc_state_condition(
        5,
        api_0.nft_get_nfts,
        [{"wallet_id": nft_wallet_0_id}],
        lambda x: x["success"] and len(x["nft_list"]) == 2,
    )
    coins = coins_response["nft_list"]
    uris = []
    for coin in coins:
        assert not coin.supports_did
        uris.append(coin.data_uris[0])
        assert coin.mint_height > 0
    assert len(uris) == 2
    assert "https://flaxlisp.com/img/logo.svg" in uris
    # The second NFT's coin id must appear among the last spend bundle's additions.
    assert bytes32.fromhex(coins[1].to_json_dict()["nft_coin_id"][2:]) in [x.name() for x in sb.additions()]
    # Pagination: request a single entry starting at index 1.
    coins_response = await wait_rpc_state_condition(
        5,
        api_0.nft_get_nfts,
        [{"wallet_id": nft_wallet_0_id, "start_index": 1, "num": 1}],
        lambda x: x["success"] and len(x["nft_list"]) == 1,
    )
    coins = coins_response["nft_list"]
    assert len(coins) == 1
    assert coins[0].data_hash.hex() == "0xD4584AD463139FA8C0D9F68F4B59F184"[2:].lower()
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_wallet_rpc_update_metadata(two_wallet_nodes: Any, trusted: Any) -> None:
    """Mint an NFT via RPC, then add a metadata URI (addressed by bech32m
    coin id) and a data URI (addressed by hex coin id) with ``nft_add_uri``,
    verifying the on-chain metadata after each update.

    Parametrized to run with the full node both trusted and untrusted.
    """
    # NOTE(review): bytes32 is already imported at module level; this local
    # re-import shadows it within the function only.
    from flax.types.blockchain_format.sized_bytes import bytes32
    num_blocks = 3
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
    ph = await wallet_0.get_new_puzzlehash()
    _ = await wallet_1.get_new_puzzlehash()
    # Configure peer trust per the parametrized mode before connecting.
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks - 1)]
    )
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    api_0 = WalletRpcApi(wallet_node_0)
    await time_out_assert(30, wallet_node_0.wallet_state_manager.synced, True)
    await time_out_assert(30, wallet_node_1.wallet_state_manager.synced, True)
    await time_out_assert(30, wallet_is_synced, True, wallet_node_0, full_node_api)
    nft_wallet_0 = await api_0.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 1"))
    assert isinstance(nft_wallet_0, dict)
    assert nft_wallet_0.get("success")
    nft_wallet_0_id = nft_wallet_0["wallet_id"]
    # mint NFT
    resp = await api_0.nft_mint_nft(
        {
            "wallet_id": nft_wallet_0_id,
            "artist_address": ph,
            "hash": "0xD4584AD463139FA8C0D9F68F4B59F185",
            "uris": ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"],
        }
    )
    assert resp.get("success")
    sb = resp["spend_bundle"]
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    coins_response = await wait_rpc_state_condition(
        5, api_0.nft_get_nfts, [dict(wallet_id=nft_wallet_0_id)], lambda x: x["nft_list"]
    )
    # NOTE(review): the second expression here is the assert *message*, not a
    # check — likely intended as a separate `assert isinstance(...)`.
    assert coins_response["nft_list"], isinstance(coins_response, dict)
    assert coins_response.get("success")
    coins = coins_response["nft_list"]
    coin = coins[0].to_json_dict()
    assert coin["mint_height"] > 0
    assert coin["data_hash"] == "0xd4584ad463139fa8c0d9f68f4b59f185"
    # chain_info should reflect the freshly minted metadata: empty meta ("mu")
    # and license ("lu") URI lists, series number/total ("sn"/"st") of 1.
    assert coin["chain_info"] == disassemble(
        Program.to(
            [
                ("u", ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"]),
                ("h", hexstr_to_bytes("0xD4584AD463139FA8C0D9F68F4B59F185")),
                ("mu", []),
                ("lu", []),
                ("sn", uint64(1)),
                ("st", uint64(1)),
            ]
        )
    )
    # add another URI using a bech32m nft_coin_id
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    nft_coin_id = encode_puzzle_hash(
        bytes32.from_hexstr(coin["nft_coin_id"]), AddressType.NFT.hrp(api_0.service.config)
    )
    tr1 = await api_0.nft_add_uri(
        {"wallet_id": nft_wallet_0_id, "nft_coin_id": nft_coin_id, "uri": "http://metadata", "key": "mu"}
    )
    assert isinstance(tr1, dict)
    assert tr1.get("success")
    coins_response = await api_0.nft_get_nfts(dict(wallet_id=nft_wallet_0_id))
    # While the update spend is unconfirmed the NFT is flagged as pending.
    assert coins_response["nft_list"][0].pending_transaction
    sb = tr1["spend_bundle"]
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    # check that new URI was added
    coins_response = await wait_rpc_state_condition(
        5,
        api_0.nft_get_nfts,
        [dict(wallet_id=nft_wallet_0_id)],
        lambda x: x["nft_list"] and len(x["nft_list"][0].metadata_uris) == 1,
    )
    coins = coins_response["nft_list"]
    assert len(coins) == 1
    coin = coins[0].to_json_dict()
    assert coin["mint_height"] > 0
    uris = coin["data_uris"]
    assert len(uris) == 1
    assert "https://www.flaxnetwork.org/img/branding/flax-logo.svg" in uris
    assert len(coin["metadata_uris"]) == 1
    assert "http://metadata" == coin["metadata_uris"][0]
    assert len(coin["license_uris"]) == 0
    # add yet another URI, this time using a hex nft_coin_id
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    nft_coin_id = coin["nft_coin_id"]
    tr1 = await api_0.nft_add_uri(
        {
            "wallet_id": nft_wallet_0_id,
            "nft_coin_id": nft_coin_id,
            "uri": "http://data",
            "key": "u",
        }
    )
    assert isinstance(tr1, dict)
    assert tr1.get("success")
    sb = tr1["spend_bundle"]
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    for i in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    coins_response = await wait_rpc_state_condition(
        5,
        api_0.nft_get_nfts,
        [dict(wallet_id=nft_wallet_0_id)],
        lambda x: x["nft_list"] and len(x["nft_list"][0].data_uris) == 2,
    )
    coins = coins_response["nft_list"]
    assert len(coins) == 1
    coin = coins[0].to_json_dict()
    assert coin["mint_height"] > 0
    uris = coin["data_uris"]
    assert len(uris) == 2
    assert len(coin["metadata_uris"]) == 1
    # Newly added data URIs are prepended to the list.
    assert "http://data" == coin["data_uris"][0]
@pytest.mark.parametrize(
    "trusted",
    [True, False],
)
@pytest.mark.asyncio
async def test_nft_with_did_wallet_creation(two_wallet_nodes: Any, trusted: Any) -> None:
    """Create a DID, attach an NFT wallet to it via RPC, and mint both a
    DID-owned NFT and an ownerless one, checking wallet/DID lookups and the
    resulting NFT records.

    Parametrized to run with the full node both trusted and untrusted.
    """
    num_blocks = 3
    full_nodes, wallets, _ = two_wallet_nodes
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node_0, server_0 = wallets[0]
    wallet_node_1, server_1 = wallets[1]
    wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
    api_0 = WalletRpcApi(wallet_node_0)
    ph = await wallet_0.get_new_puzzlehash()
    # Configure peer trust per the parametrized mode before connecting.
    if trusted:
        wallet_node_0.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
        wallet_node_1.config["trusted_peers"] = {
            full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
        }
    else:
        wallet_node_0.config["trusted_peers"] = {}
        wallet_node_1.config["trusted_peers"] = {}
    await server_0.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    await server_1.start_client(PeerInfo("localhost", uint16(full_node_server._port)), None)
    for _ in range(1, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks - 1)]
    )
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, funds)
    await time_out_assert(30, wallet_0.get_confirmed_balance, funds)
    # Create a DID (amount 1 mojo) and wait for its spend to confirm.
    did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
        wallet_node_0.wallet_state_manager, wallet_0, uint64(1)
    )
    spend_bundle_list = await wallet_node_0.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(did_wallet.id())
    spend_bundle = spend_bundle_list[0].spend_bundle
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, spend_bundle.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await time_out_assert(30, wallet_0.get_pending_change_balance, 0)
    hex_did_id = did_wallet.get_my_DID()
    hmr_did_id = encode_puzzle_hash(bytes32.from_hexstr(hex_did_id), AddressType.DID.hrp(wallet_node_0.config))
    await time_out_assert(30, wallet_is_synced, True, wallet_node_0, full_node_api)
    res = await api_0.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 1", did_id=hmr_did_id))
    assert isinstance(res, dict)
    assert res.get("success")
    nft_wallet_0_id = res["wallet_id"]
    # this shouldn't work
    # Creating a second NFT wallet for the same DID returns the existing one.
    await time_out_assert(30, wallet_is_synced, True, wallet_node_0, full_node_api)
    res = await api_0.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 1", did_id=hmr_did_id))
    assert res["wallet_id"] == nft_wallet_0_id
    # now create NFT wallet with P2 standard puzzle for inner puzzle
    await time_out_assert(30, wallet_is_synced, True, wallet_node_0, full_node_api)
    res = await api_0.create_new_wallet(dict(wallet_type="nft_wallet", name="NFT WALLET 0"))
    assert res["wallet_id"] != nft_wallet_0_id
    nft_wallet_p2_puzzle = res["wallet_id"]
    # DID-to-wallet lookup should resolve to the DID-bound NFT wallet.
    res = await api_0.nft_get_by_did({"did_id": hmr_did_id})
    assert nft_wallet_0_id == res["wallet_id"]
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, 3999999999999)
    await time_out_assert(30, wallet_0.get_confirmed_balance, 3999999999999)
    res = await api_0.nft_get_wallets_with_dids({})
    assert res.get("success")
    assert res.get("nft_wallets") == [
        {"wallet_id": nft_wallet_0_id, "did_id": hmr_did_id, "did_wallet_id": did_wallet.id()}
    ]
    res = await api_0.nft_get_wallet_did({"wallet_id": nft_wallet_0_id})
    assert res.get("success")
    assert res.get("did_id") == hmr_did_id
    # Create a NFT with DID
    nft_ph: bytes32 = await wallet_0.get_new_puzzlehash()
    resp = await api_0.nft_mint_nft(
        {
            "wallet_id": nft_wallet_0_id,
            "hash": "0xD4584AD463139FA8C0D9F68F4B59F185",
            "uris": ["https://www.flaxnetwork.org/img/branding/flax-logo.svg"],
            "target_address": encode_puzzle_hash(nft_ph, "txfx"),
        }
    )
    assert resp.get("success")
    sb = resp["spend_bundle"]
    # ensure hints are generated correctly
    memos = compute_memos(sb)
    assert memos
    puzhashes = []
    for x in memos.values():
        puzhashes.extend(list(x))
    assert len(puzhashes) > 0
    # At least one memo must hint the target puzzle hash.
    matched = 0
    for puzhash in puzhashes:
        if puzhash.hex() == nft_ph.hex():
            matched += 1
    assert matched > 0
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, 5999999999999 - 1)
    await time_out_assert(30, wallet_0.get_confirmed_balance, 5999999999999 - 1)
    # Create a NFT without DID, this will go the unassigned NFT wallet
    resp = await api_0.nft_mint_nft(
        {
            "wallet_id": nft_wallet_0_id,
            "did_id": "",
            "hash": "0xD4584AD463139FA8C0D9F68F4B59F181",
            "uris": ["https://url1"],
        }
    )
    assert resp.get("success")
    sb = resp["spend_bundle"]
    # ensure hints are generated
    assert compute_memos(sb)
    await time_out_assert_not_none(30, full_node_api.full_node.mempool_manager.get_spendbundle, sb.name())
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await time_out_assert(30, wallet_0.get_unconfirmed_balance, 7999999999998 - 1)
    await time_out_assert(30, wallet_0.get_confirmed_balance, 7999999999998 - 1)
    # Check DID NFT
    coins_response = await wait_rpc_state_condition(
        5, api_0.nft_get_nfts, [dict(wallet_id=nft_wallet_0_id)], lambda x: x["nft_list"]
    )
    coins = coins_response["nft_list"]
    assert len(coins) == 1
    did_nft = coins[0].to_json_dict()
    assert did_nft["mint_height"] > 0
    assert did_nft["supports_did"]
    assert did_nft["data_uris"][0] == "https://www.flaxnetwork.org/img/branding/flax-logo.svg"
    assert did_nft["data_hash"] == "0xD4584AD463139FA8C0D9F68F4B59F185".lower()
    assert did_nft["owner_did"][2:] == hex_did_id
    # Check unassigned NFT
    nft_wallets = await wallet_node_0.wallet_state_manager.get_all_wallet_info_entries(WalletType.NFT)
    assert len(nft_wallets) == 2
    coins_response = await wait_rpc_state_condition(
        5, api_0.nft_get_nfts, [{"wallet_id": nft_wallet_p2_puzzle}], lambda x: x["nft_list"]
    )
    assert coins_response["nft_list"]
    assert coins_response.get("success")
    coins = coins_response["nft_list"]
    assert len(coins) == 1
    non_did_nft = coins[0].to_json_dict()
    assert non_did_nft["mint_height"] > 0
    assert non_did_nft["supports_did"]
    assert non_did_nft["data_uris"][0] == "https://url1"
    assert non_did_nft["data_hash"] == "0xD4584AD463139FA8C0D9F68F4B59F181".lower()
    # DID-capable puzzle but no owner assigned.
    assert non_did_nft["owner_did"] is None
@pytest.mark.parametrize(
"trusted",
[True, False],
)
@pytest.mark.asyncio
async def test_nft_rpc_mint(two_wallet_nodes: Any, trusted: Any) -> None:
num_blocks = 3
full_nodes, wallets, _ = two_wallet_nodes
full_node_api: FullNodeSimulator = full_nodes[0]
full_node_server = full_node_api.server
wallet_node_0, server_0 = wallets[0]
wallet_node_1, server_1 = wallets[1]
wallet_0 = wallet_node_0.wallet_state_manager.main_wallet
wallet_1 = wallet_node_1.wallet_state_manager.main_wallet
api_0 = WalletRpcApi(wallet_node_0)
ph = await wallet_0.get_new_puzzlehash()
ph1 = await wallet_1.get_new_puzzlehash()
if trusted:
wallet_node_0.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
wallet_node_1.config["trusted_peers"] = {
full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()
}
else:
| python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | true |
Flax-Network/flax-blockchain | https://github.com/Flax-Network/flax-blockchain/blob/bb8715f3155bb8011a04cc8c05b3fa8133e4c64b/tests/wallet/nft_wallet/__init__.py | tests/wallet/nft_wallet/__init__.py | python | Apache-2.0 | bb8715f3155bb8011a04cc8c05b3fa8133e4c64b | 2026-01-05T07:13:52.951017Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.