repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1 value | license stringclasses 7 values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2 classes |
|---|---|---|---|---|---|---|---|---|
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/KITCTFCTF/2022/pwn/date/server.py | ctfs/KITCTFCTF/2022/pwn/date/server.py | import tempfile
import signal
import base64
import binascii
from pwn import *
import sys
def handler(signum, frame):
raise OSError("Wakeup")
def main():
signal.signal(signal.SIGALRM, handler)
signal.alarm(60)
try:
b64 = input("Base64 encoded file: ").strip()
except EOFError:
return
try:
js = base64.b64decode(b64)
except binascii.Error:
print("Invalid input", flush=True)
return
if len(js) >= 50000:
print("Invalid input", flush=True)
return
with tempfile.NamedTemporaryFile() as f:
f.write(js)
f.seek(0)
try:
# no jit/wasm for you :)
p = process(["./d8", "--jitless", "--no-expose-wasm", f.name])
p.interactive()
sys.stdout.flush()
except Exception as e:
print(e, flush=True)
if __name__ == "__main__":
main()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/KITCTFCTF/2022/pwn/sys_jail/deploy/pow.py | ctfs/KITCTFCTF/2022/pwn/sys_jail/deploy/pow.py | import os
import subprocess
import sys
DIFFICULTY = 26
def check(r, token):
return subprocess.call(["hashcash", f"-cyqb{DIFFICULTY}", "-r", r, token]) == 0
def main():
if len(sys.argv) != 2:
print("No command provided", flush=True)
exit()
r = os.urandom(8).hex()
print(f"Send the result of: hashcash -mb{DIFFICULTY} {r}", flush=True)
token = input().replace("hashcash token: ", "").strip()
if check(r, token):
subprocess.call(sys.argv[1], shell=True)
else:
print("Token invalid", flush=True)
sys.exit(1)
if __name__ == "__main__":
main()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/KITCTFCTF/2022/pwn/movsh/deploy/server.py | ctfs/KITCTFCTF/2022/pwn/movsh/deploy/server.py | from capstone import *
import subprocess
import tempfile
import signal
MAX_SHELLCODE_LEN = 250
md = Cs(CS_ARCH_X86, CS_MODE_64)
def handler(signum, frame):
raise OSError("Wakeup")
def verify_shellcode(shellcode):
# bypassing this filter is not intended
# however if you come up with a bypass feel free to use it
syscall_count = 0
for i in md.disasm(shellcode, 0x0):
if i.mnemonic != "mov" and i.mnemonic != "syscall":
print("Invalid instruction: ")
print(f"{hex(i.address)}:\t{i.mnemonic}\t{i.op_str}")
exit(0)
elif i.mnemonic == "syscall":
if syscall_count < 2:
syscall_count += 1
else:
print(f"Syscall limit reached @ {hex(i.address)}")
exit(0)
else:
pass
def execute(shellcode):
with tempfile.NamedTemporaryFile() as tmp:
tmp.write(shellcode)
tmp.seek(0)
try:
print(subprocess.check_output(f"./shellcode_executor {tmp.name}", shell=True))
except Exception as e:
print(e)
def main():
signal.signal(signal.SIGALRM, handler)
signal.alarm(60)
print(f"Please provide the shellcode in hex format ({MAX_SHELLCODE_LEN} bytes at most)")
shellcode_hex = input("> ")[:MAX_SHELLCODE_LEN].strip().encode().lower()
try:
shellcode_hex = bytes(list(filter(lambda c: chr(c) in "0123456789abcdef", shellcode_hex)))
shellcode = bytes.fromhex(shellcode_hex.decode())
verify_shellcode(shellcode)
# exit properly
shellcode += b"\xb8\x3c\x00\x00\x00\x0f\x05" # mov eax, 0x3c; syscall;
execute(shellcode)
except:
print("Invalid input")
if __name__ == "__main__":
main()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/KITCTFCTF/2022/crypto/PrimeGuesser1/PrimeGuesser1.py | ctfs/KITCTFCTF/2022/crypto/PrimeGuesser1/PrimeGuesser1.py | #!/usr/bin/env python3
import numpy as np
from numpy.polynomial import polynomial as poly
import random
def polymul(x, y, modulus, poly_mod):
return np.int64(
np.round(poly.polydiv(poly.polymul(x, y) % modulus, poly_mod)[1] % modulus)
)
def polyadd(x, y, modulus, poly_mod):
return np.int64(
np.round(poly.polydiv(poly.polyadd(x, y) % modulus, poly_mod)[1] % modulus)
)
def gen_binary_poly(size):
return np.random.randint(0, 2, size, dtype=np.int64)
def gen_uniform_poly(size, modulus):
return np.random.randint(0, modulus, size, dtype=np.int64)
def gen_normal_poly(size):
return np.int64(np.random.normal(0, 2, size=size))
def keygen(size, modulus, poly_mod):
sk = gen_binary_poly(size)
a = gen_uniform_poly(size, modulus)
e = gen_normal_poly(size)
b = polyadd(polymul(-a, sk, modulus, poly_mod), -e, modulus, poly_mod)
return (b, a), sk
def encrypt(pk, size, q, t, poly_mod, pt):
m = np.array([pt] + [0] * (size - 1), dtype=np.int64) % t
delta = q // t
scaled_m = delta * m % q
e1 = gen_normal_poly(size)
e2 = gen_normal_poly(size)
u = gen_binary_poly(size)
ct0 = polyadd(
polyadd(
polymul(pk[0], u, q, poly_mod),
e1, q, poly_mod),
scaled_m, q, poly_mod
)
ct1 = polyadd(
polymul(pk[1], u, q, poly_mod),
e2, q, poly_mod
)
return (ct0, ct1)
def decrypt(sk, size, q, t, poly_mod, ct):
scaled_pt = polyadd(
polymul(ct[1], sk, q, poly_mod),
ct[0], q, poly_mod
)
decrypted_poly = np.round(scaled_pt * t / q) % t
return int(decrypted_poly[0])
def get_factors(number):
factors = set()
for i in [2, 3, 5, 7, 11, 13, 15, 17, 19, 23]:
if i >= number:
break
if (number % i == 0):
factors.add(i)
other = number // i
prime_set = get_factors(other)
if len(prime_set) == 0:
factors.add(other)
else:
for num in prime_set:
factors.add(num)
return factors
# polynomial modulus degree
n = 2**6 # EXAMPLE !!! ON THE SERVER ARE OTHER NUMBERS
# ciphertext modulus
q = 2**20 # EXAMPLE !!! ON THE SERVER ARE OTHER NUMBERS
# plaintext modulus
t = 2**10 # EXAMPLE !!! ON THE SERVER ARE OTHER NUMBERS
# polynomial modulus
poly_mod = np.array([1] + [0] * (n - 1) + [1])
pk, sk = keygen(n, q, poly_mod)
def oracle(c):
p = decrypt(sk, n, q, t, poly_mod, c)
print(p == 0)
def smart_enrypt(p):
if p > 0 and p < 25:
return encrypt(pk, n, q, t, poly_mod, p)
def loose():
print("Failed")
exit()
def outputCipher(c):
print(''.join([str(l) + "," for l in c[0]])[:-1])
print(''.join([str(l) + "," for l in c[1]])[:-1])
for _ in range(100):
number = random.randint(11, 200)
ct = encrypt(pk, n, q, t, poly_mod, number)
outputCipher(ct)
while True:
choice = int(input("What do you want?\n").strip())
if choice == 0:
number_input = int(input("What do you want to encrypt?\n").strip(), 10)
if number_input > 20 or number_input < 1:
print("Thats out of bound")
else:
outputCipher(smart_enrypt(number_input))
elif choice == 1:
cipher_input = input("What is the first part of the Cipher?\n").strip()
c0 = [int(n, 10) for n in cipher_input.split(",")]
cipher_input = input("What is the second part of the Cipher?\n").strip()
c1 = [int(n, 10) for n in cipher_input.split(",")]
c = (c0, c1)
oracle(c)
elif choice == 2:
break
real_factors = get_factors(number)
primes = input("What are the factors?\n").strip()
if len(primes) == 0:
if len(real_factors) == 0:
continue
else:
loose()
primes_set = set()
for num in primes.split(","):
primes_set.add(int(num, 10))
if not (real_factors == primes_set):
loose()
print("You won: Flag") | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/KITCTFCTF/2022/crypto/PrimeGuesser2/PrimeGuesser2.py | ctfs/KITCTFCTF/2022/crypto/PrimeGuesser2/PrimeGuesser2.py | #!/usr/bin/env python3
import numpy as np
from numpy.polynomial import polynomial as poly
import random
def polymul(x, y, modulus, poly_mod):
return np.int64(
np.round(poly.polydiv(poly.polymul(x, y) % modulus, poly_mod)[1] % modulus)
)
def polyadd(x, y, modulus, poly_mod):
return np.int64(
np.round(poly.polydiv(poly.polyadd(x, y) % modulus, poly_mod)[1] % modulus)
)
def gen_binary_poly(size):
return np.random.randint(0, 2, size, dtype=np.int64)
def gen_uniform_poly(size, modulus):
return np.random.randint(0, modulus, size, dtype=np.int64)
def gen_normal_poly(size):
return np.int64(np.random.normal(0, 2, size=size))
def keygen(size, modulus, poly_mod):
sk = gen_binary_poly(size)
a = gen_uniform_poly(size, modulus)
e = gen_normal_poly(size)
b = polyadd(polymul(-a, sk, modulus, poly_mod), -e, modulus, poly_mod)
return (b, a), sk
def encrypt(pk, size, q, t, poly_mod, pt):
m = np.array([pt] + [0] * (size - 1), dtype=np.int64) % t
delta = q // t
scaled_m = delta * m % q
e1 = gen_normal_poly(size)
e2 = gen_normal_poly(size)
u = gen_binary_poly(size)
ct0 = polyadd(
polyadd(
polymul(pk[0], u, q, poly_mod),
e1, q, poly_mod),
scaled_m, q, poly_mod
)
ct1 = polyadd(
polymul(pk[1], u, q, poly_mod),
e2, q, poly_mod
)
return (ct0, ct1)
def decrypt(sk, size, q, t, poly_mod, ct):
scaled_pt = polyadd(
polymul(ct[1], sk, q, poly_mod),
ct[0], q, poly_mod
)
decrypted_poly = np.round(scaled_pt * t / q) % t
return int(decrypted_poly[0])
def get_factors(number):
factors = set()
for i in [2, 3, 5, 7, 11, 13, 15, 17, 19, 23]:
if i >= number:
break
if (number % i == 0):
factors.add(i)
other = number // i
prime_set = get_factors(other)
if len(prime_set) == 0:
factors.add(other)
else:
for num in prime_set:
factors.add(num)
return factors
# polynomial modulus degree
n = 2**6
# ciphertext modulus
q = 2**20
# plaintext modulus
t = 2**10
# polynomial modulus
poly_mod = np.array([1] + [0] * (n - 1) + [1])
pk, sk = keygen(n, q, poly_mod)
def oracle(c):
p = decrypt(sk, n, q, t, poly_mod, c)
print(p == 0)
def loose():
print("Failed")
exit()
def outputCipher(c):
print(''.join([str(l) + "," for l in c[0]])[:-1])
print(''.join([str(l) + "," for l in c[1]])[:-1])
for _ in range(100):
number = random.randint(11, 200)
ct = encrypt(pk, n, q, t, poly_mod, number)
outputCipher(ct)
while True:
choice = int(input("What do you want?\n").strip())
if choice == 0:
cipher_input = input("What is the first part of the Cipher?\n").strip()
c0 = [int(n, 10) for n in cipher_input.split(",")]
cipher_input = input("What is the second part of the Cipher?\n").strip()
c1 = [int(n, 10) for n in cipher_input.split(",")]
c = (c0, c1)
oracle(c)
elif choice == 1:
break
real_factors = get_factors(number)
primes = input("What are the factors?\n").strip()
if len(primes) == 0:
if len(real_factors) == 0:
continue
else:
loose()
primes_set = set()
for num in primes.split(","):
primes_set.add(int(num, 10))
if not (real_factors == primes_set):
loose()
print("You won: FLAG") | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/KITCTFCTF/2022/crypto/CrookedRoulette/Crooked_Roulette.py | ctfs/KITCTFCTF/2022/crypto/CrookedRoulette/Crooked_Roulette.py | #!/usr/bin/env python3
from math import gcd
from Crypto.Util.number import getPrime,getRandomInteger
flag = "KITCTF{fake_flag}"
p = getPrime(512)
q = getPrime(512)
n = p*q
phi = (p-1)*(q-1)
e = getPrime(256)
while gcd(e, phi) != 1:
e = getPrime(256)
d = pow(e, -1, phi)
def sign(m):
return pow(m, d, n)
def check(c, m):
return pow(c, e, n) == m
result = getRandomInteger(256)
print(f"Number of pockets: {hex(n)}")
print(f"The Manager told me, the roulette is crooked and will hit {hex(result)}")
base = 16
m2 = int(input(f"What should I bet? "), base)
if m2 % n == result:
print("It is too obvious if I bet that")
else:
s2 = sign(m2)
print(f"My Signatur is {hex(s2)}")
message = int(input(f"What do you want to bet? "), base)
signature = int(input(f"Please sign your bet "), base)
if result == message and check(signature, message):
print(f"You Win: {flag}")
else:
print("You Lose")
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/SUSCTF/2022/pwn/mujs/genucd.py | ctfs/SUSCTF/2022/pwn/mujs/genucd.py | # Create utfdata.h from UnicodeData.txt
tolower = []
toupper = []
isalpha = []
for line in open("UnicodeData.txt").readlines():
line = line.split(";")
code = int(line[0],16)
# if code > 65535: continue # skip non-BMP codepoints
if line[2][0] == 'L':
isalpha.append(code)
if line[12]:
toupper.append((code,int(line[12],16)))
if line[13]:
tolower.append((code,int(line[13],16)))
def dumpalpha():
table = []
prev = 0
start = 0
for code in isalpha:
if code != prev+1:
if start:
table.append((start,prev))
start = code
prev = code
table.append((start,prev))
print("")
print("static const Rune ucd_alpha2[] = {")
for a, b in table:
if b - a > 0:
print(hex(a)+","+hex(b)+",")
print("};");
print("")
print("static const Rune ucd_alpha1[] = {")
for a, b in table:
if b - a == 0:
print(hex(a)+",")
print("};");
def dumpmap(name, input):
table = []
prev_a = 0
prev_b = 0
start_a = 0
start_b = 0
for a, b in input:
if a != prev_a+1 or b != prev_b+1:
if start_a:
table.append((start_a,prev_a,start_b))
start_a = a
start_b = b
prev_a = a
prev_b = b
table.append((start_a,prev_a,start_b))
print("")
print("static const Rune " + name + "2[] = {")
for a, b, n in table:
if b - a > 0:
print(hex(a)+","+hex(b)+","+str(n-a)+",")
print("};");
print("")
print("static const Rune " + name + "1[] = {")
for a, b, n in table:
if b - a == 0:
print(hex(a)+","+str(n-a)+",")
print("};");
print("/* This file was automatically created from UnicodeData.txt */")
dumpalpha()
dumpmap("ucd_tolower", tolower)
dumpmap("ucd_toupper", toupper)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/SUSCTF/2022/crypto/large_case/problem.py | ctfs/SUSCTF/2022/crypto/large_case/problem.py | from Crypto.Util.number import *
from secret import e,message
def pad(s):
if len(s)<3*L:
s+=bytes(3*L-len(s))
return s
L=128
p=127846753573603084140032502367311687577517286192893830888210505400863747960458410091624928485398237221748639465569360357083610343901195273740653100259873512668015324620239720302434418836556626441491996755736644886234427063508445212117628827393696641594389475794455769831224080974098671804484986257952189021223
q=145855456487495382044171198958191111759614682359121667762539436558951453420409098978730659224765186993202647878416602503196995715156477020462357271957894750950465766809623184979464111968346235929375202282811814079958258215558862385475337911665725569669510022344713444067774094112542265293776098223712339100693
r=165967627827619421909025667485886197280531070386062799707570138462960892786375448755168117226002965841166040777799690060003514218907279202146293715568618421507166624010447447835500614000601643150187327886055136468260391127675012777934049855029499330117864969171026445847229725440665179150874362143944727374907
n=p*q*r
assert isPrime(GCD(e,p-1)) and isPrime(GCD(e,q-1)) and isPrime(GCD(e,r-1)) and e==GCD(e,p-1)*GCD(e,q-1)*GCD(e,r-1)
assert len(message)>L and len(message)<2*L
assert b'SUSCTF' in message
m=bytes_to_long(pad(message))
c=pow(m,e,n)
print(c)
'''
2832775557487418816663494645849097066925967799754895979829784499040437385450603537732862576495758207240632734290947928291961063611897822688909447511260639429367768479378599532712621774918733304857247099714044615691877995534173849302353620399896455615474093581673774297730056975663792651743809514320379189748228186812362112753688073161375690508818356712739795492736743994105438575736577194329751372142329306630950863097761601196849158280502041616545429586870751042908365507050717385205371671658706357669408813112610215766159761927196639404951251535622349916877296956767883165696947955379829079278948514755758174884809479690995427980775293393456403529481055942899970158049070109142310832516606657100119207595631431023336544432679282722485978175459551109374822024850128128796213791820270973849303929674648894135672365776376696816104314090776423931007123128977218361110636927878232444348690591774581974226318856099862175526133892
''' | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/SUSCTF/2022/crypto/Ez_Pager_Tiper/problem/magic_box.py | ctfs/SUSCTF/2022/crypto/Ez_Pager_Tiper/problem/magic_box.py | class lfsr():
def __init__(self, seed, mask, length):
self.length_mask = 2 ** length - 1
self.mask = mask & self.length_mask
self.state = seed & self.length_mask
def next(self):
next_state = (self.state << 1) & self.length_mask
i = self.state & self.mask & self.length_mask
output = 0
while i != 0:
output ^= (i & 1)
i = i >> 1
next_state ^= output
self.state = next_state
return output
def getrandbit(self, nbit):
output = 0
for _ in range(nbit):
output = (output << 1) ^ self.next()
return output
class generator():
def __init__(self, lfsr1, lfsr2, magic):
self.lfsr1 = lfsr1
self.lfsr2 = lfsr2
self.magic = magic
def infinit_power(self, magic):
return int(magic)
def malicious_magic(self, magic):
now = (-magic & magic)
magic ^= now
return int(now), int(magic)
def confusion(self, c1, c2):
magic = self.magic
output, cnt = magic, 0
output ^= c1 ^ c2
while magic:
now, magic = self.malicious_magic(magic)
cnt ^= now >> (now.bit_length() - 1)
output ^= now
output ^= cnt * c1
return int(output)
def getrandbit(self, nbit):
output1 = self.lfsr1.getrandbit(nbit)
output2 = self.lfsr2.getrandbit(nbit)
return self.confusion(output1, output2) | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/SUSCTF/2022/crypto/Ez_Pager_Tiper/problem/problem.py | ctfs/SUSCTF/2022/crypto/Ez_Pager_Tiper/problem/problem.py | from Crypto.Util.number import *
from magic_box import *
from secret import mask1, mask2, seed1, seed2, seed3
n1, n2 = 64, 12
flag = 'SUSCTF{***}'
def encrypt(cipher, ipath, opath):
ifile=open(ipath,'rb')
ofile=open(opath,'wb')
plaintext=ifile.read()
for ch in plaintext:
c=ch^cipher.getrandbit(8)
ofile.write(long_to_bytes(c))
ifile.close()
ofile.close()
def problem1():
r = getRandomInteger(6)
magic = 1<<r
lfsr1 = lfsr(seed1, mask1, n1)
lfsr2 = lfsr(seed2, mask2, n2)
cipher = generator(lfsr1, lfsr2, magic)
encrypt(cipher, "MTk4NC0wNC0wMQ==_6d30.txt", "MTk4NC0wNC0wMQ==_6d30.enc")
def problem2():
magic = getPrime(64)
lfsr1=lfsr(seed1, mask1, n1)
lfsr2=lfsr(seed3, mask2, n2)
cipher = generator(lfsr1, lfsr2, magic)
encrypt(cipher, "MTk4NC0xMi0yNQ==_76ff.txt", "MTk4NC0xMi0yNQ==_76ff.enc")
# flag in it?
print(f'hint={magic}')
# hint = 15193544052573546419
problem1()
problem2()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/SUSCTF/2022/crypto/InverseProblem/problem.py | ctfs/SUSCTF/2022/crypto/InverseProblem/problem.py | import numpy as np
from secret import flag
def gravity(n,d=0.25):
A=np.zeros([n,n])
for i in range(n):
for j in range(n):
A[i,j]=d/n*(d**2+((i-j)/n)**2)**(-1.5)
return A
n=len(flag)
A=gravity(n)
x=np.array(list(flag))
b=A@x
np.savetxt('b.txt',b) | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/SUSCTF/2022/crypto/SpecialCurve3/problem.py | ctfs/SUSCTF/2022/crypto/SpecialCurve3/problem.py | from Crypto.Util.number import *
from secret import flag,getMyPrime
import hashlib
import random
class SpecialCurve:
def __init__(self,p,a,b):
self.p=p
self.a=a
self.b=b
def __str__(self):
return f'SpecialCurve({self.p},{self.a},{self.b})'
def add(self,P1,P2):
x1,y1=P1
x2,y2=P2
if x1==0:
return P2
elif x2==0:
return P1
elif x1==x2 and (y1+y2)%self.p==0:
return (0,0)
if P1==P2:
t=(2*self.a*x1-self.b)*inverse(2*y1,self.p)%self.p
else:
t=(y2-y1)*inverse(x2-x1,self.p)%self.p
x3=self.b*inverse(self.a-t**2,self.p)%self.p
y3=x3*t%self.p
return (x3,y3)
def mul(self,P,k):
assert k>=0
Q=(0,0)
while k>0:
if k%2:
k-=1
Q=self.add(P,Q)
else:
k//=2
P=self.add(P,P)
return Q
def problem(size,k):
p=getMyPrime(size)
x=random.randint(1,p-1)
y=random.randint(1,p-1)
e=random.randint(1,p-1)
a=k*random.randint(1,p-1)**2%p
b=(a*x**2-y**2)*inverse(x,p)%p
curve=SpecialCurve(p,a,b)
G=(x,y)
Q=curve.mul(G,e)
print(f'curve={curve}')
print(f'G={G}')
print(f'Q={Q}')
return e
e1=problem(128,1)
e2=problem(256,0)
e3=problem(512,-1)
enc=bytes_to_long(hashlib.sha512(b'%d-%d-%d'%(e1,e2,e3)).digest())^bytes_to_long(flag.encode())
print(f'enc={enc}')
'''
curve=SpecialCurve(233083587295210134948821000868826832947,73126617271517175643081276880688551524,88798574825442191055315385745016140538)
G=(183831340067417420551177442269962013567, 99817328357051895244693615825466756115)
Q=(166671516040968894138381957537903638362, 111895361471674668502480740000666908829)
curve=SpecialCurve(191068609532021291665270648892101370598912795286064024735411416824693692132923,0,58972296113624136043935650439499285317465012097982529049067402580914449774185)
G=(91006613905368145804676933482275735904909223655198185414549961004950981863863, 96989919722797171541882834089135074413922451043302800296198062675754293402989)
Q=(13504049588679281286169164714588439287464466303764421302084687307396426249546, 110661224324697604640962229701359894201176516005657224773855350780007949687952)
curve=SpecialCurve(52373730653143623993722188411805072409768054271090317191163373082830382186155222057388907031638565243831629283127812681929449631957644692314271061305360051,28655236915186704327844312279364325861102737672471191366040478446302230316126579253163690638394777612892597409996413924040027276002261574013341150279408716,42416029226399083779760024372262489355327595236815424404537477696856946194575702884812426801334149232783155054432357826688204061261064100317825443760789993)
G=(15928930551986151950313548861530582114536854007449249930339281771205424453985946290830967245733880747219865184207937142979512907006835750179101295088805979, 29726385672383966862722624018664799344530038744596171136235079529609085682764414035677068447708040589338778102975312549905710028842378574272316925268724240)
Q=(38121552296651560305666865284721153617113944344833289618523344614838728589487183141203437711082603199613749216407692351802119887009907921660398772094998382, 26933444836972639216676645467487306576059428042654421228626400416790420281717654664520663525738892984862698457685902674487454159311739553538883303065780163)
enc=4161358072766336252252471282975567407131586510079023869994510082082055094259455767245295677764252219353961906640516887754903722158044643700643524839069337
''' | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/H7CTFInternational/2024/crypto/I_Lost_My_Bottoms/enc.py | ctfs/H7CTFInternational/2024/crypto/I_Lost_My_Bottoms/enc.py | from Crypto.Util.number import *
p = getPrime(1024)
bits = 128
m = bytes_to_long(b"REDACTED")
hints = [pow(m , -1 , p) , pow(m+1 , -2 , p)]
hints_leak = [(i>>bits)<<bits for i in hints]
print(f'p = {p}')
print(f'hints_leak = {hints_leak}')
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/H7CTFInternational/2025/pwn/0x0f05/chal.py | ctfs/H7CTFInternational/2025/pwn/0x0f05/chal.py | import sys
import mmap
import ctypes
from capstone import Cs, CS_ARCH_X86, CS_MODE_64
from prettytable import PrettyTable
def disassemble(shellcode):
md = Cs(CS_ARCH_X86, CS_MODE_64)
table = PrettyTable()
table.field_names = ["Address", "Bytes", "Instruction"]
table.align["Address"] = "l"
table.align["Bytes"] = "l"
table.align["Instruction"] = "l"
instructions = []
for insn in md.disasm(shellcode, 0x1000):
address = f"0x{insn.address:08x}"
bytes_hex = ' '.join(f"{b:02x}" for b in insn.bytes)
instruction = f"{insn.mnemonic} {insn.op_str}".strip()
table.add_row([address, bytes_hex, instruction])
instructions.append((insn.mnemonic, insn.op_str))
print("\n[+] Disassembled Instructions:")
print(table)
return instructions
def check_syscall(instructions):
for mnemonic, op_str in instructions:
if mnemonic == "syscall" or (mnemonic == "int" and op_str == "0x80"):
print("[!] Detected syscall or int 0x80. Execution blocked.")
sys.exit(1)
def execute_shellcode(shellcode):
size = len(shellcode)
mem = mmap.mmap(-1, size, mmap.MAP_PRIVATE | mmap.MAP_ANONYMOUS, mmap.PROT_READ | mmap.PROT_WRITE | mmap.PROT_EXEC)
mem.write(shellcode)
mem.seek(0)
libc = ctypes.CDLL(None)
libc.mprotect.argtypes = (ctypes.c_void_p, ctypes.c_size_t, ctypes.c_int)
libc.mprotect(ctypes.c_void_p(ctypes.addressof(ctypes.c_char.from_buffer(mem))), size, mmap.PROT_READ | mmap.PROT_EXEC)
shell_func = ctypes.CFUNCTYPE(None)(ctypes.addressof(ctypes.c_char.from_buffer(mem)))
print("[+] Executing shellcode...")
try:
shell_func()
except Exception as e:
print(f"[!] Error executing shellcode: {e}")
if __name__ == "__main__":
print("0xWelcome to ex-code-0x02!")
print("[+] Enter shellcode (hex format, without 0x prefix):")
hex_shellcode = input().strip()
shellcode = bytes.fromhex(hex_shellcode)
print("[+] Disassembling shellcode...")
instructions = disassemble(shellcode)
print("[+] Checking for restricted syscalls...")
check_syscall(instructions)
execute_shellcode(shellcode)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/H7CTFInternational/2025/crypto/OAEPsy/chall.py | ctfs/H7CTFInternational/2025/crypto/OAEPsy/chall.py | #!/usr/bin/env python3
import os, base64, math, hashlib
from flask import Flask, request, jsonify, abort
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
import secret
app = Flask(__name__)
RSA_BITS = int(os.environ.get("RSA_BITS", "2048"))
MAX_ORACLE_QUERIES = int(os.environ.get("MAX_ORACLE_QUERIES", "20000"))
key = RSA.generate(RSA_BITS)
n = key.n
e = key.e
k = (n.bit_length() + 7) // 8
B = 1 << (8 * (k - 1))
def derive_r(n):
h = hashlib.sha256(str(n).encode()).digest()
candidate = int.from_bytes(h, "big") % (n - 3) + 2
while math.gcd(candidate, n) != 1:
candidate = (candidate + 1) % (n - 2) + 2
return candidate
R_CONST = derive_r(n)
if math.gcd(R_CONST, n) != 1:
raise SystemExit("R_CONST not invertible with n")
cipher_pub = PKCS1_OAEP.new(key.publickey())
ciphertext = cipher_pub.encrypt(secret.FLAG)
cipher_b64 = base64.b64encode(ciphertext).decode()
oracle_queries = 0
def bytes_to_int(b): return int.from_bytes(b, "big")
def int_to_bytes(i, length): return i.to_bytes(length, "big")
@app.route("/pub", methods=["GET"])
def pub():
return jsonify({"n": hex(n), "e": e, "ciphertext_b64": cipher_b64})
@app.route("/oracle", methods=["POST"])
def oracle():
global oracle_queries
if oracle_queries >= MAX_ORACLE_QUERIES:
abort(429)
data = request.get_json(force=True, silent=True)
if not data or "ct_b64" not in data:
abort(400)
try:
ct_bytes = base64.b64decode(data["ct_b64"])
except Exception:
abort(400)
c_int = bytes_to_int(ct_bytes)
if c_int >= n or c_int < 0:
return jsonify({"res": False})
m_int = pow(c_int, key.d, n)
oracle_queries += 1
val = (R_CONST * m_int) % n
res = (val >= B)
return jsonify({"res": bool(res)})
if __name__ == "__main__":
app.run(host="0.0.0.0", port=int(os.environ.get("PORT", "6137")))
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/H7CTFInternational/2025/crypto/Kakashi_s_Secret_Jutsu/source.py | ctfs/H7CTFInternational/2025/crypto/Kakashi_s_Secret_Jutsu/source.py | #!/usr/bin/env python3
import socketserver
import signal
import string, random
from hashlib import sha256
from Crypto.Cipher import AES
from secret import get_key, get_IV, get_secret, get_flag, perform_encryption
KAKASHI_BANNER = br"""
/$$ /$$ /$$$$$$$$ /$$$$$$ /$$$$$$$$ /$$$$$$$$
| $$ | $$|_____ $$//$$__ $$|__ $$__/| $$_____/
| $$ | $$ /$$/| $$ \__/ | $$ | $$
| $$$$$$$$ /$$/ | $$ | $$ | $$$$$
| $$__ $$ /$$/ | $$ | $$ | $$__/
| $$ | $$ /$$/ | $$ $$ | $$ | $$
| $$ | $$ /$$/ | $$$$$$/ | $$ | $$
|__/ |__/|__/ \______/ |__/ |__/
"""
JUTSU_MENU = br"""
1. Perform Jutsu
2. Test Your Sharingan
3. Die
"""
class KakashiTask(socketserver.BaseRequestHandler):
def _recvall(self):
BUFF_SIZE = 2048
data = b''
while True:
part = self.request.recv(BUFF_SIZE)
data += part
if len(part) < BUFF_SIZE:
break
return data.strip()
def send(self, msg, newline=True):
try:
if newline:
msg += b'\n'
self.request.sendall(msg)
except:
pass
def recv(self, prompt=b'> '):
self.send(prompt, newline=False)
return self._recvall()
def recvhex(self, prompt=b'> '):
self.send(prompt, newline=False)
try:
data = bytes.fromhex(self._recvall().decode('latin-1'))
except ValueError as e:
self.send(b"Wrong hex value!")
self.close()
return None
return data
def close(self):
self.send(b"Farewell, shinobi~")
self.request.close()
def pad(self, data):
pad_len = 16 - len(data)%16
return data + bytes([pad_len])*pad_len
def proof_of_work(self):
proof = ''.join([random.choice(string.ascii_letters+string.digits) for _ in range(20)])
_hexdigest = sha256(proof.encode()).hexdigest()
self.send(f"sha256(XXXX+{proof[4:]}) == {_hexdigest}".encode())
x = self.recv(prompt=b'Give me XXXX: ')
if len(x) != 4 or sha256(x+proof[4:].encode()).hexdigest() != _hexdigest:
return False
return True
def handle(self):
signal.alarm(1200)
self.send(KAKASHI_BANNER)
if not self.proof_of_work():
return
secret = get_secret()
key = get_key()
IV = get_IV()
flag = get_flag()
aes = AES.new(key, mode=AES.MODE_CBC, iv=IV)
self.send(f"Kakashi says — IV is: {IV.hex()}".encode())
self.send(b"If your Sharingan can read the secret, bring it to me and I shall reward you with a scroll~!")
while True:
self.send(JUTSU_MENU, newline=False)
choice = self.recv()
if choice == b"1":
msg = self.recvhex(prompt=b"Your jutsu (in hex): ")
if not msg: break
cipher = perform_encryption(aes, msg, secret, self.pad)
self.send(cipher.hex().encode())
continue
elif choice == b"2":
guess = self.recvhex(prompt=b"What did your Sharingan see? (in hex): ")
if not guess: break
if guess == secret:
self.send(b"Kakashi: Impressive. Here is your scroll : " + flag)
else:
self.send(b"H7CTF{Wrong_Guess!}")
self.close()
break
class KakashiThreadedServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
pass
class KakashiForkedServer(socketserver.ForkingMixIn, socketserver.TCPServer):
pass
if __name__ == "__main__":
HOST, PORT = '0.0.0.0', 10000
server = KakashiForkedServer((HOST, PORT), KakashiTask)
server.allow_reuse_address = True
server.serve_forever() | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/misc/stressful_reader/jail.py | ctfs/snakeCTF/2023/misc/stressful_reader/jail.py | #!/usr/bin/env python3
import os
banner = r"""
_____ _ __ _ _
/ ___| | / _| | | | |
\ `--.| |_ _ __ ___ ___ ___| |_ _ _| | _ __ ___ __ _ __| | ___ _ __
`--. \ __| '__/ _ \/ __/ __| _| | | | | | '__/ _ \/ _` |/ _` |/ _ \ '__|
/\__/ / |_| | | __/\__ \__ \ | | |_| | | | | | __/ (_| | (_| | __/ |
\____/ \__|_| \___||___/___/_| \__,_|_| |_| \___|\__,_|\__,_|\___|_|
"""
class Jail():
    """CTF pyjail: reads one line from stdin and exec()s it, but only if it
    survives a character and keyword denylist.  Goal: leak the $FLAG env var."""
    def __init__(self) -> None:
        print(banner)
        print()
        print()
        print("Will you be able to read the $FLAG?")
        print("> ",end="")
        # Single-letter attributes spelling F-L-A-G — reachable through
        # `self` even under the restricted character set below.
        self.F = ""
        self.L = ""
        self.A = ""
        self.G = ""
        self.run_code(input())
        pass
    def run_code(self, code):
        """exec() `code` iff it is ASCII and contains none of the banned
        characters or substrings."""
        # Banned single characters: a subset of lowercase letters, all
        # uppercase letters, all digits, and most punctuation.
        badchars = [ 'c', 'h', 'j', 'k', 'n', 'o', 'p', 'q', 'u', 'w', 'x', 'y', 'z'
        , 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N'
        , 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W'
        , 'X', 'Y', 'Z', '!', '"', '#', '$', '%'
        , '&', '\'', '-', '/', ';', '<', '=', '>', '?', '@'
        , '[', '\\', ']', '^', '`', '{', '|', '}', '~'
        , '0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
        # Banned substrings: mostly introspection / I/O builtins and dunders.
        badwords = ["aiter", "any", "ascii", "bin", "bool", "breakpoint"
        , "callable", "chr", "classmethod", "compile", "dict"
        , "enumerate", "eval", "exec", "filter", "getattr"
        , "globals", "input", "iter", "next", "locals", "memoryview"
        , "next", "object", "open", "print", "setattr"
        , "staticmethod", "vars", "__import__", "bytes", "keys", "str"
        , "join", "__dict__", "__dir__", "__getstate__", "upper"]
        if (code.isascii() and
            all([x not in code for x in badchars]) and
            all([x not in code for x in badwords])):
            # Intentionally dangerous: executing filtered user input is the
            # whole point of this challenge.
            exec(code)
        else:
            print("Exploiting detected, plz halp :/")
    def get_var(self, varname):
        """Print an environment variable — presumably the method the player
        is meant to reach from inside the jail."""
        print(os.getenv(varname))
if (__name__ == "__main__"):
    Jail()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
def binary_encode_message(message: int, N_bits: int):
    """Return the N_bits least-significant bits of `message`, LSB first."""
    return [(message >> bit) & 0x1 for bit in range(N_bits)]
def decode_message(encoded_message: list, p: int):
    """Reassemble an integer from per-bit coefficients modulo p.

    Coefficients at or above the threshold ((p+1)//2, or 2 when p == 2)
    are treated as negative residues, i.e. mapped to c - p, before being
    weighted by powers of two (LSB-first).
    """
    threshold = 2 if p == 2 else (p + 1) >> 1
    total = 0
    for position, coeff in enumerate(encoded_message):
        value = coeff - p if coeff >= threshold else coeff
        total += value * (2 ** position)
    return total
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/thefork/service.py | ctfs/snakeCTF/2023/crypto/thefork/service.py | import forkaes
import os
import signal
TIMEOUT = 600  # seconds before SIGALRM terminates the session (see __main__)
# The encryption key is supplied by the environment as a hex string and
# converted to a list of byte values, as forkaes expects.
assert("KEY" in os.environ)
KEY = os.environ["KEY"]
KEY = [ x for x in bytes.fromhex(KEY)]
assert len(KEY) == forkaes.BLOCK_SIZE
def main():
    """TheFORK oracle: publish one (plaintext, tweak, left/right ciphertext)
    sample, then let the user query compute_sibling with chosen inputs."""
    print("TheFORK oracle is here!")
    # Random per-connection tweak and plaintext, one byte per list entry.
    tweak = [ int.from_bytes(os.urandom(1), byteorder='big') for _ in range(forkaes.BLOCK_SIZE)]
    plaintext = [ int.from_bytes(os.urandom(1), byteorder='big') for _ in range(forkaes.BLOCK_SIZE)]
    left_ct, right_ct = forkaes.encrypt(plaintext, KEY, tweak)
    print("Try to find the key we used to encrypt the following plaintext:")
    print(plaintext)
    print()
    print(f'The tweak we used is:')
    print(tweak)
    print()
    print("The corresponding left and right ciphertexts are:")
    print(f'Left: {left_ct}')
    print(f'Rigth: {right_ct}')
    print()
    while True:
        print("""
MENU:
1) Compute sibling
2) Exit
""")
        choice = input("> ")
        if choice == "1":
            # Below line read inputs from user using map() function
            # NOTE(review): the prompt says "space separated" but the code
            # actually splits on commas — the message and parser disagree.
            print(f"The ciphertext and the tweak should be represented as a list of values space separated such as 2 3 4 5 6\nYou can input only {forkaes.BLOCK_SIZE} numbers")
            ciphertext = list(map(int,input("\nEnter the ciphertext : ").strip().split(',')))[:forkaes.BLOCK_SIZE]
            tweak = list(map(int,input("\nEnter the tweak : ").strip().split(',')))[:forkaes.BLOCK_SIZE]
            side_of_the_ciphertext = input("Side of your ciphertext (possible values are: right | left): ")
            if side_of_the_ciphertext != "left" and side_of_the_ciphertext != "right":
                print("Wrong side value!")
                continue
            # Oracle: given one branch's ciphertext + tweak, return the other
            # branch's ciphertext under the secret KEY.
            print("The other ciphertexts is: ")
            print(forkaes.compute_sibling(ciphertext, KEY, tweak, side=side_of_the_ciphertext))
        elif choice == "2":
            break
if __name__ == "__main__":
    signal.alarm(TIMEOUT)
    main()
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/thefork/forkaes.py | ctfs/snakeCTF/2023/crypto/thefork/forkaes.py | from config import *
from AES.aes_utilities import *
from AES.utilities import *
# returns ciphertext_0 and ciphertext_1
# returns ciphertext_0 and ciphertext_1
def encrypt(plaintext, key, tweak):
    """Tweaked fork-cipher encryption.

    Runs HEADER_ROUNDS shared rounds, then forks the state into two
    branches (LEFT_ROUNDS and RIGHT_ROUNDS long).  Every round XORs in the
    round key and the tweak before the unkeyed AES round function; each
    branch ends with a final (key XOR tweak) whitening.
    Returns the pair (left_ciphertext, right_ciphertext).
    """
    keys = key_expansion(key, TOTAL_ROUNDS+1)
    current_state = plaintext
    for i in range(HEADER_ROUNDS):
        t = add(current_state, keys[i])
        t = add(t, tweak)
        current_state = forward_round(t)
    # FORK: both branches continue from the same post-header state.
    current_state_path_0 = current_state
    current_state_path_1 = current_state
    # PATH 0 (left branch): rounds HEADER_ROUNDS .. HEADER_ROUNDS+LEFT_ROUNDS-1
    for i in range(HEADER_ROUNDS, HEADER_ROUNDS+LEFT_ROUNDS):
        t = add(current_state_path_0, keys[i])
        t = add(t, tweak)
        current_state_path_0 = forward_round(t)
    current_state_path_0 = add(current_state_path_0, keys[HEADER_ROUNDS+LEFT_ROUNDS])
    current_state_path_0 = add(current_state_path_0, tweak)
    # PATH 1 (right branch): rounds HEADER_ROUNDS+LEFT_ROUNDS .. TOTAL_ROUNDS-1
    for i in range(HEADER_ROUNDS+LEFT_ROUNDS, TOTAL_ROUNDS):
        t = add(current_state_path_1, keys[i])
        t = add(t, tweak)
        current_state_path_1 = forward_round(t)
    current_state_path_1 = add(current_state_path_1, keys[TOTAL_ROUNDS])
    current_state_path_1 = add(current_state_path_1, tweak)
    return (current_state_path_0, current_state_path_1)
def decrypt(ciphertext, key, tweak, side="left"):
    """Invert encrypt() for one branch ciphertext back to the plaintext.

    First undoes the chosen branch's rounds (in reverse key order) down to
    the fork point, then undoes the shared header rounds.
    `side` selects which branch `ciphertext` came from ("left"/"right").
    """
    keys = key_expansion(key, TOTAL_ROUNDS+1)
    current_state = ciphertext
    if side == "left":
        # Undo the left branch: whitening key first, then rounds backwards.
        for i in range(HEADER_ROUNDS+LEFT_ROUNDS, HEADER_ROUNDS, -1):
            t = add(current_state, keys[i])
            t = add(t, tweak)
            current_state = inverse_round(t)
        current_state = add(current_state, keys[HEADER_ROUNDS])
        current_state = add(current_state, tweak)
    elif side == "right":
        for i in range(TOTAL_ROUNDS, HEADER_ROUNDS+LEFT_ROUNDS, -1):
            t = add(current_state, keys[i])
            t = add(t, tweak)
            current_state = inverse_round(t)
        current_state = add(current_state, keys[HEADER_ROUNDS+LEFT_ROUNDS])
        current_state = add(current_state, tweak)
    # Undo the shared header rounds back to the original plaintext.
    for i in range(HEADER_ROUNDS-1, -1, -1):
        t = inverse_round(current_state)
        t = add(t, tweak)
        current_state = add(t, keys[i])
    return current_state
def compute_sibling(ciphertext, key, tweak, side="left"):
    """Given one branch's ciphertext, return the other branch's ciphertext.

    Inverts the given branch back to the fork-point state, then runs the
    sibling branch forward — no trip back through the header rounds.
    `side` names the branch `ciphertext` belongs to ("left"/"right").
    """
    keys = key_expansion(key, TOTAL_ROUNDS+1)
    current_state = ciphertext
    if side == "left":
        # Undo the left branch down to the fork point...
        for i in range(HEADER_ROUNDS+LEFT_ROUNDS, HEADER_ROUNDS, -1):
            t = add(current_state, keys[i])
            t = add(t, tweak)
            current_state = inverse_round(t)
        current_state = add(current_state, keys[HEADER_ROUNDS])
        current_state = add(current_state, tweak)
        # ...then run the right branch forward.
        for i in range(HEADER_ROUNDS+LEFT_ROUNDS, TOTAL_ROUNDS):
            t = add(current_state, keys[i])
            t = add(t, tweak)
            current_state = forward_round(t)
        current_state = add(current_state, keys[TOTAL_ROUNDS])
        current_state = add(current_state, tweak)
    elif side == "right":
        # Undo the right branch down to the fork point...
        for i in range(TOTAL_ROUNDS, HEADER_ROUNDS+LEFT_ROUNDS, -1):
            t = add(current_state, keys[i])
            t = add(t, tweak)
            current_state = inverse_round(t)
        current_state = add(current_state, keys[HEADER_ROUNDS+LEFT_ROUNDS])
        current_state = add(current_state, tweak)
        # ...then run the left branch forward.
        for i in range(HEADER_ROUNDS, HEADER_ROUNDS+LEFT_ROUNDS):
            t = add(current_state, keys[i])
            t = add(t, tweak)
            current_state = forward_round(t)
        current_state = add(current_state, keys[HEADER_ROUNDS+LEFT_ROUNDS])
        current_state = add(current_state, tweak)
    return current_state
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/thefork/config.py | ctfs/snakeCTF/2023/crypto/thefork/config.py | HEADER_ROUNDS = 5
LEFT_ROUNDS = 2   # rounds on the left branch after the fork (see forkaes.py)
RIGHT_ROUNDS = 2  # rounds on the right branch after the fork
BLOCK_SIZE = 16   # state size in bytes (one AES block)
TOTAL_ROUNDS = HEADER_ROUNDS + LEFT_ROUNDS + RIGHT_ROUNDS
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/thefork/AES/aes_utilities.py | ctfs/snakeCTF/2023/crypto/thefork/AES/aes_utilities.py | from AES.utilities import *
def forward_round(state):
    """One unkeyed AES-style round: SubBytes, ShiftRows, MixColumns."""
    return mix_columns(shift_rows(sub_bytes(state)))
def inverse_round(state):
    """Inverse of forward_round: InvMixColumns, InvShiftRows, InvSubBytes."""
    return inverse_sub_bytes(inverse_shift_row(inverse_mix_columns(state)))
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/thefork/AES/utilities.py | ctfs/snakeCTF/2023/crypto/thefork/AES/utilities.py | from AES.data import *
def shift_rows(current_state):
    """AES-style ShiftRows permutation over a flat 16-byte state."""
    return [current_state[col + ((col + row) * 4) % 16]
            for row in range(4)
            for col in range(4)]
def inverse_shift_row(current_state):
    """Inverse of shift_rows: restores the original byte order."""
    return [current_state[col + ((row + (4 - col)) * 4) % 16]
            for row in range(4)
            for col in range(4)]
def mix_columns(current_state):
    """AES MixColumns over the four 4-byte columns of the flat state.

    mul2 / mul_3 (from AES.data) are GF(2^8) multiply-by-2 / multiply-by-3
    lookup tables; each output byte is the standard [2 3 1 1] circulant
    combination of its column.
    """
    output = []
    for i in range(4):
        output.append((mul2[current_state[(4*i)+0]] ^ mul_3[current_state[(4*i)+1]] ^ current_state[(4*i)+2] ^ current_state[(4*i)+3]) & 255)
        output.append((current_state[(4*i)+0] ^ mul2[current_state[(4*i)+1]] ^ mul_3[current_state[(4*i)+2]] ^ current_state[(4*i)+3]) & 255)
        output.append((current_state[(4*i)+0] ^ current_state[(4*i)+1] ^ mul2[current_state[(4*i)+2]] ^ mul_3[current_state[(4*i)+3]]) & 255)
        output.append((mul_3[current_state[(4*i)+0]] ^ current_state[(4*i)+1] ^ current_state[(4*i)+2] ^ mul2[current_state[(4*i)+3]]) & 255)
    return output
def inverse_mix_columns(current_state):
    """AES InvMixColumns: [14 11 13 9] circulant combination per column,
    using the GF(2^8) multiplication tables from AES.data."""
    output = []
    for i in range(4):
        output.append((mul_14[current_state[(4*i)+0]] ^ mul_11[current_state[(4*i)+1]] ^ mul_13[current_state[(4*i)+2]] ^ mul_9[current_state[(4*i)+3]]) & 255)
        output.append((mul_9[current_state[(4*i)+0]] ^ mul_14[current_state[(4*i)+1]] ^ mul_11[current_state[(4*i)+2]] ^ mul_13[current_state[(4*i)+3]]) & 255)
        output.append((mul_13[current_state[(4*i)+0]] ^ mul_9[current_state[(4*i)+1]] ^ mul_14[current_state[(4*i)+2]] ^ mul_11[current_state[(4*i)+3]]) & 255)
        output.append((mul_11[current_state[(4*i)+0]] ^ mul_13[current_state[(4*i)+1]] ^ mul_9[current_state[(4*i)+2]] ^ mul_14[current_state[(4*i)+3]]) & 255)
    return output
def add(current_state, item):
    """XOR the first 16 bytes of the two states element-wise (AddRoundKey)."""
    return [current_state[idx] ^ item[idx] for idx in range(16)]
def sub_bytes(current_state):
    """Apply the AES S-box to the first 16 bytes of the state."""
    return [SBOX[current_state[idx]] for idx in range(16)]
def inverse_sub_bytes(current_state):
    """Apply the inverse AES S-box to the first 16 bytes of the state."""
    return [INV_SBOX[current_state[idx]] for idx in range(16)]
def key_expansion(key, total_rounds):
    """Derive `total_rounds` 16-byte round keys, AES-128 style.

    NOTE(review): standard AES RotWord feeds bytes 13,14,15,12 of the
    previous key through the S-box; here byte 14 is used twice and byte 15
    never (2nd and 3rd appends below) — presumably deliberate for the
    challenge, but it deviates from real AES.  Confirm before reusing.
    """
    keys = [ [] for i in range(total_rounds)]
    # K0 = Key
    for i in range(16):
        keys[0].append(key[i])
    for i in range(1,total_rounds):
        # First 4 bytes: S-box of (almost-)rotated last word, XOR first word,
        # with the round constant folded into byte 0.
        keys[i].append(SBOX[ keys[i-1][13] ] ^ keys[i-1][0] ^ Rcon[ i ])
        keys[i].append(SBOX[ keys[i-1][14] ] ^ keys[i-1][1])
        keys[i].append(SBOX[ keys[i-1][14] ] ^ keys[i-1][2])
        keys[i].append(SBOX[ keys[i-1][12] ] ^ keys[i-1][3])
        # Remaining 12 bytes: previous-key byte XOR the word generated
        # 4 bytes earlier in this key (standard AES chaining).
        keys[i].append(keys[i-1][4] ^ keys[i][0])
        keys[i].append(keys[i-1][5] ^ keys[i][1])
        keys[i].append(keys[i-1][6] ^ keys[i][2])
        keys[i].append(keys[i-1][7] ^ keys[i][3])
        keys[i].append(keys[i-1][8] ^ keys[i][4])
        keys[i].append(keys[i-1][9] ^ keys[i][5])
        keys[i].append(keys[i-1][10] ^ keys[i][6])
        keys[i].append(keys[i-1][11] ^ keys[i][7])
        keys[i].append(keys[i-1][12] ^ keys[i][8])
        keys[i].append(keys[i-1][13] ^ keys[i][9])
        keys[i].append(keys[i-1][14] ^ keys[i][10])
        keys[i].append(keys[i-1][15] ^ keys[i][11])
    return keys
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/thefork/AES/data.py | ctfs/snakeCTF/2023/crypto/thefork/AES/data.py | mul2 = [
0x00,0x02,0x04,0x06,0x08,0x0a,0x0c,0x0e,0x10,0x12,0x14,0x16,0x18,0x1a,0x1c,0x1e,
0x20,0x22,0x24,0x26,0x28,0x2a,0x2c,0x2e,0x30,0x32,0x34,0x36,0x38,0x3a,0x3c,0x3e,
0x40,0x42,0x44,0x46,0x48,0x4a,0x4c,0x4e,0x50,0x52,0x54,0x56,0x58,0x5a,0x5c,0x5e,
0x60,0x62,0x64,0x66,0x68,0x6a,0x6c,0x6e,0x70,0x72,0x74,0x76,0x78,0x7a,0x7c,0x7e,
0x80,0x82,0x84,0x86,0x88,0x8a,0x8c,0x8e,0x90,0x92,0x94,0x96,0x98,0x9a,0x9c,0x9e,
0xa0,0xa2,0xa4,0xa6,0xa8,0xaa,0xac,0xae,0xb0,0xb2,0xb4,0xb6,0xb8,0xba,0xbc,0xbe,
0xc0,0xc2,0xc4,0xc6,0xc8,0xca,0xcc,0xce,0xd0,0xd2,0xd4,0xd6,0xd8,0xda,0xdc,0xde,
0xe0,0xe2,0xe4,0xe6,0xe8,0xea,0xec,0xee,0xf0,0xf2,0xf4,0xf6,0xf8,0xfa,0xfc,0xfe,
0x1b,0x19,0x1f,0x1d,0x13,0x11,0x17,0x15,0x0b,0x09,0x0f,0x0d,0x03,0x01,0x07,0x05,
0x3b,0x39,0x3f,0x3d,0x33,0x31,0x37,0x35,0x2b,0x29,0x2f,0x2d,0x23,0x21,0x27,0x25,
0x5b,0x59,0x5f,0x5d,0x53,0x51,0x57,0x55,0x4b,0x49,0x4f,0x4d,0x43,0x41,0x47,0x45,
0x7b,0x79,0x7f,0x7d,0x73,0x71,0x77,0x75,0x6b,0x69,0x6f,0x6d,0x63,0x61,0x67,0x65,
0x9b,0x99,0x9f,0x9d,0x93,0x91,0x97,0x95,0x8b,0x89,0x8f,0x8d,0x83,0x81,0x87,0x85,
0xbb,0xb9,0xbf,0xbd,0xb3,0xb1,0xb7,0xb5,0xab,0xa9,0xaf,0xad,0xa3,0xa1,0xa7,0xa5,
0xdb,0xd9,0xdf,0xdd,0xd3,0xd1,0xd7,0xd5,0xcb,0xc9,0xcf,0xcd,0xc3,0xc1,0xc7,0xc5,
0xfb,0xf9,0xff,0xfd,0xf3,0xf1,0xf7,0xf5,0xeb,0xe9,0xef,0xed,0xe3,0xe1,0xe7,0xe5
]
mul_3 = [
0x00,0x03,0x06,0x05,0x0c,0x0f,0x0a,0x09,0x18,0x1b,0x1e,0x1d,0x14,0x17,0x12,0x11,
0x30,0x33,0x36,0x35,0x3c,0x3f,0x3a,0x39,0x28,0x2b,0x2e,0x2d,0x24,0x27,0x22,0x21,
0x60,0x63,0x66,0x65,0x6c,0x6f,0x6a,0x69,0x78,0x7b,0x7e,0x7d,0x74,0x77,0x72,0x71,
0x50,0x53,0x56,0x55,0x5c,0x5f,0x5a,0x59,0x48,0x4b,0x4e,0x4d,0x44,0x47,0x42,0x41,
0xc0,0xc3,0xc6,0xc5,0xcc,0xcf,0xca,0xc9,0xd8,0xdb,0xde,0xdd,0xd4,0xd7,0xd2,0xd1,
0xf0,0xf3,0xf6,0xf5,0xfc,0xff,0xfa,0xf9,0xe8,0xeb,0xee,0xed,0xe4,0xe7,0xe2,0xe1,
0xa0,0xa3,0xa6,0xa5,0xac,0xaf,0xaa,0xa9,0xb8,0xbb,0xbe,0xbd,0xb4,0xb7,0xb2,0xb1,
0x90,0x93,0x96,0x95,0x9c,0x9f,0x9a,0x99,0x88,0x8b,0x8e,0x8d,0x84,0x87,0x82,0x81,
0x9b,0x98,0x9d,0x9e,0x97,0x94,0x91,0x92,0x83,0x80,0x85,0x86,0x8f,0x8c,0x89,0x8a,
0xab,0xa8,0xad,0xae,0xa7,0xa4,0xa1,0xa2,0xb3,0xb0,0xb5,0xb6,0xbf,0xbc,0xb9,0xba,
0xfb,0xf8,0xfd,0xfe,0xf7,0xf4,0xf1,0xf2,0xe3,0xe0,0xe5,0xe6,0xef,0xec,0xe9,0xea,
0xcb,0xc8,0xcd,0xce,0xc7,0xc4,0xc1,0xc2,0xd3,0xd0,0xd5,0xd6,0xdf,0xdc,0xd9,0xda,
0x5b,0x58,0x5d,0x5e,0x57,0x54,0x51,0x52,0x43,0x40,0x45,0x46,0x4f,0x4c,0x49,0x4a,
0x6b,0x68,0x6d,0x6e,0x67,0x64,0x61,0x62,0x73,0x70,0x75,0x76,0x7f,0x7c,0x79,0x7a,
0x3b,0x38,0x3d,0x3e,0x37,0x34,0x31,0x32,0x23,0x20,0x25,0x26,0x2f,0x2c,0x29,0x2a,
0x0b,0x08,0x0d,0x0e,0x07,0x04,0x01,0x02,0x13,0x10,0x15,0x16,0x1f,0x1c,0x19,0x1a
]
mul_9 = [
0x00,0x09,0x12,0x1b,0x24,0x2d,0x36,0x3f,0x48,0x41,0x5a,0x53,0x6c,0x65,0x7e,0x77,
0x90,0x99,0x82,0x8b,0xb4,0xbd,0xa6,0xaf,0xd8,0xd1,0xca,0xc3,0xfc,0xf5,0xee,0xe7,
0x3b,0x32,0x29,0x20,0x1f,0x16,0x0d,0x04,0x73,0x7a,0x61,0x68,0x57,0x5e,0x45,0x4c,
0xab,0xa2,0xb9,0xb0,0x8f,0x86,0x9d,0x94,0xe3,0xea,0xf1,0xf8,0xc7,0xce,0xd5,0xdc,
0x76,0x7f,0x64,0x6d,0x52,0x5b,0x40,0x49,0x3e,0x37,0x2c,0x25,0x1a,0x13,0x08,0x01,
0xe6,0xef,0xf4,0xfd,0xc2,0xcb,0xd0,0xd9,0xae,0xa7,0xbc,0xb5,0x8a,0x83,0x98,0x91,
0x4d,0x44,0x5f,0x56,0x69,0x60,0x7b,0x72,0x05,0x0c,0x17,0x1e,0x21,0x28,0x33,0x3a,
0xdd,0xd4,0xcf,0xc6,0xf9,0xf0,0xeb,0xe2,0x95,0x9c,0x87,0x8e,0xb1,0xb8,0xa3,0xaa,
0xec,0xe5,0xfe,0xf7,0xc8,0xc1,0xda,0xd3,0xa4,0xad,0xb6,0xbf,0x80,0x89,0x92,0x9b,
0x7c,0x75,0x6e,0x67,0x58,0x51,0x4a,0x43,0x34,0x3d,0x26,0x2f,0x10,0x19,0x02,0x0b,
0xd7,0xde,0xc5,0xcc,0xf3,0xfa,0xe1,0xe8,0x9f,0x96,0x8d,0x84,0xbb,0xb2,0xa9,0xa0,
0x47,0x4e,0x55,0x5c,0x63,0x6a,0x71,0x78,0x0f,0x06,0x1d,0x14,0x2b,0x22,0x39,0x30,
0x9a,0x93,0x88,0x81,0xbe,0xb7,0xac,0xa5,0xd2,0xdb,0xc0,0xc9,0xf6,0xff,0xe4,0xed,
0x0a,0x03,0x18,0x11,0x2e,0x27,0x3c,0x35,0x42,0x4b,0x50,0x59,0x66,0x6f,0x74,0x7d,
0xa1,0xa8,0xb3,0xba,0x85,0x8c,0x97,0x9e,0xe9,0xe0,0xfb,0xf2,0xcd,0xc4,0xdf,0xd6,
0x31,0x38,0x23,0x2a,0x15,0x1c,0x07,0x0e,0x79,0x70,0x6b,0x62,0x5d,0x54,0x4f,0x46
]
mul_11 = [
0x00,0x0b,0x16,0x1d,0x2c,0x27,0x3a,0x31,0x58,0x53,0x4e,0x45,0x74,0x7f,0x62,0x69,
0xb0,0xbb,0xa6,0xad,0x9c,0x97,0x8a,0x81,0xe8,0xe3,0xfe,0xf5,0xc4,0xcf,0xd2,0xd9,
0x7b,0x70,0x6d,0x66,0x57,0x5c,0x41,0x4a,0x23,0x28,0x35,0x3e,0x0f,0x04,0x19,0x12,
0xcb,0xc0,0xdd,0xd6,0xe7,0xec,0xf1,0xfa,0x93,0x98,0x85,0x8e,0xbf,0xb4,0xa9,0xa2,
0xf6,0xfd,0xe0,0xeb,0xda,0xd1,0xcc,0xc7,0xae,0xa5,0xb8,0xb3,0x82,0x89,0x94,0x9f,
0x46,0x4d,0x50,0x5b,0x6a,0x61,0x7c,0x77,0x1e,0x15,0x08,0x03,0x32,0x39,0x24,0x2f,
0x8d,0x86,0x9b,0x90,0xa1,0xaa,0xb7,0xbc,0xd5,0xde,0xc3,0xc8,0xf9,0xf2,0xef,0xe4,
0x3d,0x36,0x2b,0x20,0x11,0x1a,0x07,0x0c,0x65,0x6e,0x73,0x78,0x49,0x42,0x5f,0x54,
0xf7,0xfc,0xe1,0xea,0xdb,0xd0,0xcd,0xc6,0xaf,0xa4,0xb9,0xb2,0x83,0x88,0x95,0x9e,
0x47,0x4c,0x51,0x5a,0x6b,0x60,0x7d,0x76,0x1f,0x14,0x09,0x02,0x33,0x38,0x25,0x2e,
0x8c,0x87,0x9a,0x91,0xa0,0xab,0xb6,0xbd,0xd4,0xdf,0xc2,0xc9,0xf8,0xf3,0xee,0xe5,
0x3c,0x37,0x2a,0x21,0x10,0x1b,0x06,0x0d,0x64,0x6f,0x72,0x79,0x48,0x43,0x5e,0x55,
0x01,0x0a,0x17,0x1c,0x2d,0x26,0x3b,0x30,0x59,0x52,0x4f,0x44,0x75,0x7e,0x63,0x68,
0xb1,0xba,0xa7,0xac,0x9d,0x96,0x8b,0x80,0xe9,0xe2,0xff,0xf4,0xc5,0xce,0xd3,0xd8,
0x7a,0x71,0x6c,0x67,0x56,0x5d,0x40,0x4b,0x22,0x29,0x34,0x3f,0x0e,0x05,0x18,0x13,
0xca,0xc1,0xdc,0xd7,0xe6,0xed,0xf0,0xfb,0x92,0x99,0x84,0x8f,0xbe,0xb5,0xa8,0xa3
]
mul_13 = [
0x00,0x0d,0x1a,0x17,0x34,0x39,0x2e,0x23,0x68,0x65,0x72,0x7f,0x5c,0x51,0x46,0x4b,
0xd0,0xdd,0xca,0xc7,0xe4,0xe9,0xfe,0xf3,0xb8,0xb5,0xa2,0xaf,0x8c,0x81,0x96,0x9b,
0xbb,0xb6,0xa1,0xac,0x8f,0x82,0x95,0x98,0xd3,0xde,0xc9,0xc4,0xe7,0xea,0xfd,0xf0,
0x6b,0x66,0x71,0x7c,0x5f,0x52,0x45,0x48,0x03,0x0e,0x19,0x14,0x37,0x3a,0x2d,0x20,
0x6d,0x60,0x77,0x7a,0x59,0x54,0x43,0x4e,0x05,0x08,0x1f,0x12,0x31,0x3c,0x2b,0x26,
0xbd,0xb0,0xa7,0xaa,0x89,0x84,0x93,0x9e,0xd5,0xd8,0xcf,0xc2,0xe1,0xec,0xfb,0xf6,
0xd6,0xdb,0xcc,0xc1,0xe2,0xef,0xf8,0xf5,0xbe,0xb3,0xa4,0xa9,0x8a,0x87,0x90,0x9d,
0x06,0x0b,0x1c,0x11,0x32,0x3f,0x28,0x25,0x6e,0x63,0x74,0x79,0x5a,0x57,0x40,0x4d,
0xda,0xd7,0xc0,0xcd,0xee,0xe3,0xf4,0xf9,0xb2,0xbf,0xa8,0xa5,0x86,0x8b,0x9c,0x91,
0x0a,0x07,0x10,0x1d,0x3e,0x33,0x24,0x29,0x62,0x6f,0x78,0x75,0x56,0x5b,0x4c,0x41,
0x61,0x6c,0x7b,0x76,0x55,0x58,0x4f,0x42,0x09,0x04,0x13,0x1e,0x3d,0x30,0x27,0x2a,
0xb1,0xbc,0xab,0xa6,0x85,0x88,0x9f,0x92,0xd9,0xd4,0xc3,0xce,0xed,0xe0,0xf7,0xfa,
0xb7,0xba,0xad,0xa0,0x83,0x8e,0x99,0x94,0xdf,0xd2,0xc5,0xc8,0xeb,0xe6,0xf1,0xfc,
0x67,0x6a,0x7d,0x70,0x53,0x5e,0x49,0x44,0x0f,0x02,0x15,0x18,0x3b,0x36,0x21,0x2c,
0x0c,0x01,0x16,0x1b,0x38,0x35,0x22,0x2f,0x64,0x69,0x7e,0x73,0x50,0x5d,0x4a,0x47,
0xdc,0xd1,0xc6,0xcb,0xe8,0xe5,0xf2,0xff,0xb4,0xb9,0xae,0xa3,0x80,0x8d,0x9a,0x97
]
mul_14 = [
0x00,0x0e,0x1c,0x12,0x38,0x36,0x24,0x2a,0x70,0x7e,0x6c,0x62,0x48,0x46,0x54,0x5a,
0xe0,0xee,0xfc,0xf2,0xd8,0xd6,0xc4,0xca,0x90,0x9e,0x8c,0x82,0xa8,0xa6,0xb4,0xba,
0xdb,0xd5,0xc7,0xc9,0xe3,0xed,0xff,0xf1,0xab,0xa5,0xb7,0xb9,0x93,0x9d,0x8f,0x81,
0x3b,0x35,0x27,0x29,0x03,0x0d,0x1f,0x11,0x4b,0x45,0x57,0x59,0x73,0x7d,0x6f,0x61,
0xad,0xa3,0xb1,0xbf,0x95,0x9b,0x89,0x87,0xdd,0xd3,0xc1,0xcf,0xe5,0xeb,0xf9,0xf7,
0x4d,0x43,0x51,0x5f,0x75,0x7b,0x69,0x67,0x3d,0x33,0x21,0x2f,0x05,0x0b,0x19,0x17,
0x76,0x78,0x6a,0x64,0x4e,0x40,0x52,0x5c,0x06,0x08,0x1a,0x14,0x3e,0x30,0x22,0x2c,
0x96,0x98,0x8a,0x84,0xae,0xa0,0xb2,0xbc,0xe6,0xe8,0xfa,0xf4,0xde,0xd0,0xc2,0xcc,
0x41,0x4f,0x5d,0x53,0x79,0x77,0x65,0x6b,0x31,0x3f,0x2d,0x23,0x09,0x07,0x15,0x1b,
0xa1,0xaf,0xbd,0xb3,0x99,0x97,0x85,0x8b,0xd1,0xdf,0xcd,0xc3,0xe9,0xe7,0xf5,0xfb,
0x9a,0x94,0x86,0x88,0xa2,0xac,0xbe,0xb0,0xea,0xe4,0xf6,0xf8,0xd2,0xdc,0xce,0xc0,
0x7a,0x74,0x66,0x68,0x42,0x4c,0x5e,0x50,0x0a,0x04,0x16,0x18,0x32,0x3c,0x2e,0x20,
0xec,0xe2,0xf0,0xfe,0xd4,0xda,0xc8,0xc6,0x9c,0x92,0x80,0x8e,0xa4,0xaa,0xb8,0xb6,
0x0c,0x02,0x10,0x1e,0x34,0x3a,0x28,0x26,0x7c,0x72,0x60,0x6e,0x44,0x4a,0x58,0x56,
0x37,0x39,0x2b,0x25,0x0f,0x01,0x13,0x1d,0x47,0x49,0x5b,0x55,0x7f,0x71,0x63,0x6d,
0xd7,0xd9,0xcb,0xc5,0xef,0xe1,0xf3,0xfd,0xa7,0xa9,0xbb,0xb5,0x9f,0x91,0x83,0x8d
]
SBOX = [
0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]
INV_SBOX = [
0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]
Rcon = [
0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39,
0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a,
0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8,
0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef,
0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc,
0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b,
0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3,
0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94,
0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20,
0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35,
0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f,
0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04,
0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63,
0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd,
0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d] | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/snakeCTF/2023/crypto/bloom_bloom/challenge.py | ctfs/snakeCTF/2023/crypto/bloom_bloom/challenge.py | from Crypto.Cipher import AES
from Crypto.Util.Padding import pad
import os
import random
import signal
import mmh3
TIMEOUT = 300  # seconds before SIGALRM kills the session (see __main__)
assert("FLAG" in os.environ)
FLAG = os.environ["FLAG"]
assert(FLAG.startswith("CTF{"))
assert(FLAG.endswith("}"))
# Characters permitted in usernames (see is_valid).
alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
# Bloom filter state: a single integer used as a bitmap of `size` bits.
users = 0b0
hash_functions_count = 5  # murmur hashes per entry (seeds 0..4)
size = 256 # number of bits
logged_in = False
# Per-process secrets; usernames are AES-ECB encrypted under `key` before
# hashing into the filter.  `iv` is unused by the visible code.
key = os.urandom(16)
iv = os.urandom(16)
def check_user(username):
    """Bloom-filter membership test.

    Returns True iff every one of the hash_functions_count bit positions for
    the AES-ECB-encrypted, padded username is set in `users`.  As with any
    bloom filter, this may return false positives.
    """
    global users
    cipher = AES.new(key, AES.MODE_ECB)
    enc_username = cipher.encrypt(pad(username.encode(), AES.block_size))
    for i in range(hash_functions_count):
        # One murmur hash per seed i, reduced to a bit index in [0, size).
        digest = mmh3.hash(enc_username, i) % size
        if users & (0x1 << digest) == 0:
            return False
    return True
def add_user(username):
    """Register a username by setting its bloom-filter bits in `users`."""
    global users
    cipher = AES.new(key, AES.MODE_ECB)
    enc_username = cipher.encrypt(pad(username.encode(), AES.block_size))
    for i in range(hash_functions_count):
        digest = mmh3.hash(enc_username, i) % size
        users = users | (0x1 << digest)
def login():
    """Mark the session as logged in (module-level state)."""
    global logged_in
    logged_in = True
def logout():
    """Mark the session as logged out."""
    global logged_in
    logged_in = False
def is_valid(username):
    """Accept usernames of at most 128 characters drawn from `alphabet`."""
    if len(username) > 128:
        return False
    return all(ch in alphabet for ch in username)
def main():
    """Interactive menu: register/login against the bloom filter.  Option 2
    hands out FLAG when "Administrator" appears to be in the filter."""
    print("__ Welcome to the super secure Database __")
    while True:
        print("""
1) Login
2) Login as Administrator
3) Register
4) Logout
5) Exit
""")
        choice = input("> ")
        if choice == "1":
            if logged_in:
                print("You are already logged in.")
                continue
            username = input("Username: ")
            if not is_valid(username):
                print("Invalid username!")
                continue
            if username == "Administrator":
                print("Forbidden!")
                continue
            # Bloom filters can report false positives, so this login may
            # succeed for a name that was never registered.
            if check_user(username):
                print(f"Login successfull! Welcome back {username}")
                login()
            else:
                print(f"The user {username} does not exist")
        elif choice == "2":
            if logged_in:
                print("You are already logged in with a different username")
                continue
            # The flag is gated purely on the filter claiming Administrator
            # exists — registration of that name is blocked elsewhere.
            if check_user("Administrator"):
                print(f"Welcome back Administrator")
                print(f"Here is your flag: {FLAG}")
            else:
                print("Administrator is not a valid user in the database")
        elif choice == "3":
            username = input("Username: ")
            if not is_valid(username):
                print("Invalid username!")
                continue
            if username == "Administrator":
                print("Forbidden!")
                continue
            if check_user(username):
                print(f"Such username already exists!")
                continue
            add_user(username)
            print(f"Good job {username}! You are now able to Login")
        elif choice == "4":
            if logged_in:
                logout()
                print("GoodBye!")
            else:
                print("You are not logged in!")
        elif choice == "5":
            break
if __name__ == "__main__":
    signal.alarm(TIMEOUT)
    main()
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/Rush/2023/crypto/BreakMe/chall.py | ctfs/Rush/2023/crypto/BreakMe/chall.py | from Crypto.Util.number import *
from Crypto.PublicKey import RSA
FLAG = ""
with open("flag.txt", "rb") as f:
    FLAG = f.read()
p = getPrime(2048)
p_factor = p
# p becomes p_factor^2 and q = p_factor^6, so N = p_factor^8 — the modulus
# is a prime power, not a product of two distinct primes.
p *= p
q = pow(p_factor, 6)
e = 0x10001
N = p*q
"""
-
-
-
VANISHED CODE
(known information: the cipher is just a textbook rsa)
-
-
-
"""
# NOTE(review): `phi` and `encrypt` are defined in the vanished section
# above; as published, this generator script is intentionally incomplete
# and will NameError if run as-is.
d = inverse(e, phi)
ciphertext = encrypt(FLAG, e, N)
exported = RSA.construct( ( N, e ) ).publickey().exportKey()
with open("key.pem", 'wb') as f:
    f.write(exported)
with open('ciphertext.txt', 'w') as f:
    f.write(ciphertext.hex())
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/EnigmaXplore/2024/crypto/Fool_Me_Not/chall.py | ctfs/EnigmaXplore/2024/crypto/Fool_Me_Not/chall.py | import os
from Crypto.Cipher import AES
from Crypto.Protocol.KDF import PBKDF2
def multiply_bytes(a, b):
    """Byte-wise XOR of `a` and `b`, truncated to the shorter input.

    (Named "multiply", but this is XOR — i.e. addition in GF(2^8).)
    """
    return bytes(left ^ right for left, right in zip(a, b))
def aes_permutation(data, key):
    """AES-ECB-encrypt `data` (a multiple of 16 bytes) under `key`."""
    return AES.new(key, AES.MODE_ECB).encrypt(data)
def encrypt_keys(password, text, length=16):
    """Derive `length` bytes from `password` via PBKDF2 with salt `text`
    and 10^6 iterations."""
    return PBKDF2(password, text, dkLen=length, count=1000000)
def encrypt(master_key, plaintext, num_rounds=3):
    """XOR-whiten `master_key` with two PBKDF2-derived pads and repeatedly
    permute it with AES-ECB.

    NOTE(review): `termination_vector` is random but used as the AES *key*
    for every round, and both it and the PBKDF2 salt `important` are
    prepended to the output in the clear.
    """
    important = os.urandom(16)
    # Two 16-byte pads derived from `plaintext` with related salts
    # (the salt and its byte-reverse).
    k1 = encrypt_keys(plaintext, important, 16)
    k2 = encrypt_keys(plaintext, important[::-1], 16)
    termination_vector = os.urandom(16)
    intermediate = multiply_bytes(master_key, k1)
    for _ in range(num_rounds):
        intermediate = aes_permutation(intermediate, termination_vector)
        intermediate = multiply_bytes(intermediate, k2)
        intermediate = aes_permutation(intermediate, termination_vector)
        intermediate = multiply_bytes(intermediate, k1)
    return termination_vector + important + intermediate
k1 = 'REDACTED'
plaintext = 'REDACTED'
# NOTE(review): the arguments look swapped relative to the signature
# encrypt(master_key, plaintext, ...): `plaintext.encode()` lands in the
# master_key slot and the string `k1` in the plaintext slot — confirm this
# is intentional for the challenge.
ciphertext = encrypt(plaintext.encode(), k1, num_rounds=5)
print("Ciphertext:", ciphertext)
# ciphertext = b'\xf9e\x8bgO\xab\x8co\xd5l\x91\xc9G\xf0+\xaa?\xe7\xa6\xd2\xa1\xc9:)\xef\xd0\xdd\x9a\xd5\xe8y/\xf70\xb2IM\xf2\x1a\x80\x1b\xb1\xea\xca\x1a\xecw\xb0'
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/IRON/2024/misc/Capital_Of_Italy/main.py | ctfs/IRON/2024/misc/Capital_Of_Italy/main.py | #!/usr/bin/env python3
REDACTED = "ironCTF{test}"  # placeholder flag in the public handout
# Characters blocked from user input (redacted in the public handout).
blacklist = 'REDACTEDREDACTED'
print("WELCOME :)")
# Shadows the `breakpoint` builtin in this module's globals, so eval'd
# input cannot reach the real breakpoint() through the bare name.
breakpoint = "breakpoint"
data = input()
# Payloads are limited to 12 characters.
if len(data) > 12:
    print("Too long...")
    exit()
for chars in blacklist:
    if chars in data:
        print("Blocked Character: ", chars)
        exit()
try:
    # Intentionally dangerous: evaluating filtered user input is the
    # whole point of this eval-jail challenge.
    eval(data)
except Exception as e:
    print("Something went wrong\n", e)
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/IRON/2024/rev/Super_Secure_Login_Shell/interface.py | ctfs/IRON/2024/rev/Super_Secure_Login_Shell/interface.py | #!/usr/bin/python3
import ctypes
import requests
from getpass import getpass
from hashlib import sha256
from base64 import b64encode
from sys import exit
def generateToken(username :str, password_hash :bytes, access :int) -> bytes:
    """Build a base64-encoded login token via the native ./tokengen.so library.

    NOTE: local-variable annotations such as `:CDLL` / `:PyCSimpleType`
    below are never evaluated by CPython, so those undefined names are
    harmless at runtime.
    """
    _tokengen :CDLL = ctypes.CDLL('./tokengen.so')
    # Declared C signature: generateToken(char *user, uint16_t ulen,
    # uint8_t hash[32], uint8_t access) -> uint8_t *
    _tokengen.generateToken.argtypes :tuple[PyCSimpleType] = (ctypes.c_char_p, ctypes.c_uint16, ctypes.POINTER(ctypes.c_ubyte), ctypes.c_uint8)
    _tokengen.generateToken.restype :PyCPointerType = ctypes.POINTER(ctypes.c_ubyte)
    _tokengen.free.argtypes :tuple[PyCSimpleType] = (ctypes.c_void_p,)
    ulen :int = len(username)
    # Token layout: 110 fixed bytes plus the username length.
    size :int = 110 + ulen
    barray :PyCArrayType = ctypes.c_ubyte * 32
    token :LP_c_ubyte = _tokengen.generateToken(ctypes.c_char_p(username.encode()), ctypes.c_uint16(ulen), barray(*password_hash), ctypes.c_uint8(access))
    ret :bytes = b64encode(bytes(token[:size]))
    # The buffer is released through the library's own free(), presumably
    # because it was allocated by the library's allocator.
    _tokengen.free(token)
    return ret
def getCreds() -> tuple[str, bytes, int]:
    """Prompt until a valid username (1-256 chars) and access level (0-255)
    are supplied; the password is returned as its SHA-256 digest."""
    print('Welcome to the Super Secure Login Shell')
    print('Enter your credentials and access level to access the flag!\n')
    while True:
        username :str = input('Username: ')
        if not username or len(username) > 256:
            print('Please ensure the username is of 1-256 characters!\n')
            continue
        # getpass() hides the password while typing; only the hash is kept.
        password_hash :bytes = sha256(getpass().encode()).digest()
        # NOTE(review): int() raises ValueError (uncaught) on non-numeric
        # input rather than re-prompting.
        access :int = int(input('Access level: '))
        if not 0 <= access <= 255:
            print('Please ensure the access level is in the range 0-255!\n')
            continue
        break
    return username, password_hash, access
def login(token: bytes):
url = 'https://b655d7019f1498a.1nf1n1ty.team/'
headers = {'Content-Type': 'application/octet-stream'}
response = requests.post(url, data=token, headers=headers)
if response.status_code == 200:
print("Login successful!")
print("Flag: ", response.content.decode())
elif response.status_code == 401:
print("Unauthorized: Invalid credentials.")
else:
print(f"Error: {response.status_code}, {response.content.decode()}")
def main() -> int:
username, password_hash, access = getCreds()
token :bytes = generateToken(username, password_hash, access)
login(token)
return 0
if __name__ == '__main__':
exit(main())
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/IRON/2024/crypto/Rivest_Shamir_Adleman_1/chal.py | ctfs/IRON/2024/crypto/Rivest_Shamir_Adleman_1/chal.py | from Crypto.Util.number import *
m = open("flag.txt",'rb').read()
m = bytes_to_long(m)
p = getPrime(1024)
q = getPrime(1024)
N = p*q
e = getRandomNBitInteger(16)
c = pow(m,e,N)
p_ = p >> (200)
print(f"{(p_,N,e,c)=}")
# (p_,N,e,c)=(78251056776113743922781362749830646373211175353656790171039496888342171662458492506297767981353887690931452440620588460424832375197427124943346919084717792877241717599798699596252163346397300952154047511640741738581061446499402444306089020012841936, 19155750974833741583193175954281590563726157170945198297004159460941099410928572559396586603869227741976115617781677050055003534675899765832064973073604801444516483333718433505641277789211533814981212445466591143787572063072012686620553662750418892611152219385262027111838502078590253300365603090810554529475615741997879081475539139083909537636187870144455396293865731172472266214152364966965486064463013169673277547545796210067912520397619279792527485993120983571116599728179232502586378026362114554073310185828511219212318935521752030577150436386831635283297669979721206705401841108223134880706200280776161816742511, 37929, 18360638515927091408323573987243771860358592808066239563037326262998090628041137663795836701638491309626921654806176147983008835235564144131508890188032718841579547621056841653365205374032922110171259908854680569139265494330638365871014755623899496058107812891247359641915061447326195936351276776429612672651699554362477232678286997748513921174452554559807152644265886002820939933142395032126999791934865013547916035484742277215894738953606577594559190553807625082545082802319669474061085974345302655680800297032801212853412563127910754108599054834023083534207306068106714093193341748990945064417347044638122445194693) | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/IRON/2024/crypto/Backdoor/chal.py | ctfs/IRON/2024/crypto/Backdoor/chal.py | from curve_operations import Point,Curve # Custom module
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad
from Crypto.Util.number import long_to_bytes
class Dual_EC:
def __init__(self):
p = 229054522729978652250851640754582529779
a = -75
b = -250
self.curve = Curve(p,a,b)
self.P = Point(97396093570994028423863943496522860154 , 2113909984961319354502377744504238189)
self.Q = Point(137281564215976890139225160114831726699 , 111983247632990631097104218169731744696)
self.set_initial_state()
def set_initial_state(self):
self.state = ???SECRET🤫???
def set_next_state(self):
self.state = self.curve.scalar_multiply(self.P, self.state).x
def gen_rand_num(self):
rand_point = self.curve.scalar_multiply(self.Q, self.state)
rand_num = rand_point.x
self.set_next_state()
return rand_num
def main():
prng = Dual_EC()
flag = b'flag{test}'
print("My PRNG has passed International Standards!!!")
print("Here is a Sample Random Number to prove it to you : ", prng.gen_rand_num())
key = long_to_bytes((prng.gen_rand_num() << 128) + prng.gen_rand_num())
iv = long_to_bytes(prng.gen_rand_num())
cipher = AES.new(key, AES.MODE_CBC, iv)
encrypted_bytes = cipher.encrypt(pad(flag, AES.block_size))
print('Encrypted bytes : ',encrypted_bytes)
if(__name__ == "__main__"):
main() | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/IRON/2024/crypto/Minecraft_cipher/image.py | ctfs/IRON/2024/crypto/Minecraft_cipher/image.py | import random
def xor(a,b):
return bytes([x^y for x,y in zip(a,b)])
class CustomRandom:
def __init__(self, m, a=None, b=None, x=None) -> None:
if a is None:
self.a = random.getrandbits(64)
self.b = random.getrandbits(64)
self.x = random.getrandbits(64)
else:
self.a = a
self.b = b
self.x = x
self.m = m
print(f"{self.x = }")
def next_bytes(self):
self.x = (self.a*self.x + self.b) % self.m
return int(bin(self.x)[-16:-9],2),int(bin(self.x)[-23:-16],2)
r = CustomRandom(2**64)
im = open("flag.png", 'rb').read()
ks = [x for _ in range(len(im)//2 + 1) for x in r.next_bytes()]
ct = xor(im, ks)
f = open('flag.enc','wb')
f.write(ct)
f.close()
# self.x = 9014855307380235246 | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/DCTF/2021/crypto/A_Simple_SP_Box/sp_box.py | ctfs/DCTF/2021/crypto/A_Simple_SP_Box/sp_box.py | from string import ascii_letters, digits
from random import SystemRandom
from math import ceil, log
from signal import signal, alarm, SIGALRM
from secret import flag
random = SystemRandom()
ALPHABET = ascii_letters + digits + "_!@#$%.'\"+:;<=}{"
shuffled = list(ALPHABET)
random.shuffle(shuffled)
S_box = {k : v for k, v in zip(ALPHABET, shuffled)}
def encrypt(message):
if len(message) % 2:
message += "_"
message = list(message)
rounds = int(2 * ceil(log(len(message), 2)))
for round in range(rounds):
message = [S_box[c] for c in message]
if round < (rounds-1):
message = [message[i] for i in range(len(message)) if i%2 == 1] + [message[i] for i in range(len(message)) if i%2 == 0]
return ''.join(message)
def play():
print("Here's the flag, please decrypt it for me:")
print(encrypt(flag))
for _ in range(150):
guess = input("> ").strip()
assert 0 < len(guess) <= 10000
if guess == flag:
print("Well done. The flag is:")
print(flag)
break
else:
print("That doesn't look right, it encrypts to this:")
print(encrypt(guess))
def timeout(a, b):
print("\nOut of time. Exiting...")
exit()
signal(SIGALRM, timeout)
alarm(5 * 60)
play()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/DCTF/2021/crypto/Just_Take_Your_Time/just-take-your-time.py | ctfs/DCTF/2021/crypto/Just_Take_Your_Time/just-take-your-time.py | #!/usr/bin python3
from flag import flag
from Crypto.Cipher import DES3
from time import time
from random import randint
from secrets import token_hex
from pytimedinput import timedInput
guess = 3
TIMEOUT = 1
a = randint(1000000000000000, 9999999999999999)
b = randint(1000000000000000, 9999999999999999)
print("Show me you are worthy and solve for x! You have one second.")
print("{} * {} = ".format(a, b))
answ, _ = timedInput("> ", timeOut = 1, forcedTimeout = True)
try:
assert(a*b == int(answ))
except:
print("You are not worthy!")
exit(1)
key = str(int(time())).zfill(16).encode("utf-8")
secret = token_hex(16)
cipher = DES3.new(key, DES3.MODE_CFB, b"00000000")
encrypted = cipher.encrypt(secret.encode("utf-8"))
print("You have proven yourself to be capable of taking on the final task. Decrypt this and the flag shall be yours!")
print(encrypted.hex())
start_time = time()
while(time() - start_time < TIMEOUT and guess > 0):
delta = time() - start_time
answ, _ = timedInput("> ", timeOut = TIMEOUT + 1 - delta, forcedTimeout = True)
try:
assert(secret == answ)
break
except:
if answ != "":
guess -= 1
if (guess != 1):
print("You are wrong. {} guesses remain.".format(guess))
else:
print("You are wrong. {} guess remains.".format(guess))
if (secret != answ):
print("You have been unsuccessful in your quest for the flag.")
else:
print("Congratulations! Here is your flag.")
print(flag)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/DCTF/2021/crypto/Lockpicking/lockpicking.py | ctfs/DCTF/2021/crypto/Lockpicking/lockpicking.py | from random import randint
from secret import flag, solvable
from signal import signal, alarm, SIGALRM
class lsfr:
def __init__(self):
self.state = [randint(0, 5039) for _ in range(10)]
while True:
self.coefs = [randint(0, 5039) for _ in range(10)]
if solvable(self): break
def next(self):
n = sum([self.state[i] * self.coefs[i] for i in range(10)]) % 5039
self.state = self.state[1:] + [n]
return n
def check(pin, guess):
a = 0
b = 0
for i in range(len(guess)):
if guess[i] in pin:
if pin.index(guess[i]) == i: a += 1
else: b += 1
return [a,b]
def unique(n):
return len(set("%04d" % n)) == 4
def play():
i = 0
print("Flag is locked under %d pins, you have %d guesses." % (N, r))
for _ in range(r):
guess = input("Enter pin %d:\n>" % (i+1))
a, b = check(pins[i], guess)
if a == 4 and b == 0:
i += 1
if i == N:
print("Congratulations! Here is the flag: %s" % flag)
return
else:
print("Correct, onto the next one!")
else:
print("Wrong! Hint: A%dB%d" % (a,b))
print("Out of guesses, exiting...")
def timeout(a, b):
print("\nOut of time. Exiting...")
exit()
signal(SIGALRM, timeout)
alarm(5 * 60)
rng = lsfr()
r = 260
N = 200
all = ["%04d" % n for n in range(10000) if unique(n)]
pins = [all[rng.next()] for _ in range(N)]
play()
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/CursedCTF/2024/Quals/misc/folderjail/app.py | ctfs/CursedCTF/2024/Quals/misc/folderjail/app.py | #!/usr/local/bin/python
import os
import shutil
import tarfile
from folders.folders import FolderAnalyzer, FolderTranspiler
TMP_DIR = '/tmp/program'
def unzip_tar_gz(hex_input):
tar_gz_data = bytes.fromhex(hex_input)
if os.path.exists(TMP_DIR):
shutil.rmtree(TMP_DIR)
os.makedirs(TMP_DIR, exist_ok=True)
with open(TMP_DIR + '/archive.tar.gz', 'wb') as f:
f.write(tar_gz_data)
with tarfile.open(TMP_DIR + '/archive.tar.gz', 'r:gz', dereference=False) as tar:
tar.extractall(TMP_DIR)
os.remove(TMP_DIR + '/archive.tar.gz')
hex_input = input("> ")
unzip_tar_gz(hex_input)
tokens = FolderAnalyzer(TMP_DIR).lex()
code = FolderTranspiler(tokens).transpile()
exec(code) | python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/CursedCTF/2024/Quals/crypto/hashbash/challenge.py | ctfs/CursedCTF/2024/Quals/crypto/hashbash/challenge.py | import ctypes, zlib
from collections import Counter
from secret import FLAG, FLAG2, FLAG3
# See hash_impl.c for implementations of other hash algorithms
c_hash_impl = ctypes.CDLL("./hash_impl.so")
c_hash_wrapper = lambda alg_name: lambda s: ctypes.c_uint( getattr(c_hash_impl, alg_name)(s) ).value
HASH_ALGS = {
"hash_bkdr": c_hash_wrapper("hash_bkdr"),
"hash_djb2": c_hash_wrapper("hash_djb2"),
"hash_js": c_hash_wrapper("hash_js"),
"hash_loselose": c_hash_wrapper("hash_loselose"),
"hash_sdbm": c_hash_wrapper("hash_sdbm"),
"hash_crc32": zlib.crc32
}
inp = bytes.fromhex(input("Enter string to hash (in hex): "))
assert len(inp) > 1, "short string too boring :("
hash_vals = {name: alg(inp) for name, alg in HASH_ALGS.items()}
for name, hash_val in hash_vals.items():
print(f"{name}(input) = {hex(hash_val)}")
if max(Counter(hash_vals.values()).values()) == 3:
print(f"Collision found! flag: {FLAG}")
elif max(Counter(hash_vals.values()).values()) == 4:
print(f"More collision found! flag: {FLAG2}")
elif max(Counter(hash_vals.values()).values()) > 4:
print(f"Woah. flag: {FLAG3}")
else:
print("No collision - try again!")
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/CursedCTF/2024/Quals/crypto/fuck_joseph/challenge.py | ctfs/CursedCTF/2024/Quals/crypto/fuck_joseph/challenge.py | from Crypto.Util.number import *
from flag import flag
n = getPrime(256) * getPrime(256)
e = 0x10001
print(n)
print(pow(bytes_to_long(flag), e, n))
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/CursedCTF/2024/Quals/crypto/too_many_cooks/too_many_cooks.py | ctfs/CursedCTF/2024/Quals/crypto/too_many_cooks/too_many_cooks.py | BANNER = """\
I was alarmed to learn that cryptographers have broken 31 out of 64 rounds of SHA256.
To defend against cryptographers, I made my own hash function.
I hear rolling your own crypto is bad, so I merely composed existing hash functions.
~ retr0id
"""
import asyncio
import hashlib
from fastcrc.crc64 import ecma_182 as crc64 # https://pypi.org/project/fastcrc/
import xxhash # https://pypi.org/project/xxhash/
def megahash(msg: bytes) -> bytes:
h0 = hashlib.blake2s(msg).digest() # avoid NIST backdoors
h1 = xxhash.xxh64_digest(h0) # gotta go fast
h1 += crc64(msg).to_bytes(8) # but 64 bits isn't enough security, lets add some more
h2 = hashlib.sha3_256(h1).digest() # and we still want a NIST-approved output!
return h2
try:
from flag import FLAG
except ImportError:
print("[!] using placeholder flag")
FLAG = "flag{placeholder}"
HOST = "0.0.0.0"
PORT = 4200
TIMEOUT = 60
async def handle_client(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
writer.write(BANNER.encode())
writer.write(b"Show me a hash collision for sha3_256( xxh64(blake2s_256(msg)) || crc64(msg) )\n\n")
writer.write(b"msg1 (hex): ")
await writer.drain()
line = await reader.readline()
try:
msg1 = bytes.fromhex(line.decode())
except:
writer.write(b"invalid input\n")
await writer.drain()
return
writer.write(b"msg2 (hex): ")
await writer.drain()
line = await reader.readline()
try:
msg2 = bytes.fromhex(line.decode())
except:
writer.write(b"invalid input\n")
await writer.drain()
return
if len(msg1) > 1024 or len(msg2) > 1024:
writer.write(b"too big!\n")
await writer.drain()
return
if msg1 == msg2:
writer.write(b"You can't fool me that easily!\n")
await writer.drain()
return
h1 = megahash(msg1)
h2 = megahash(msg2)
writer.write(f"\nh1 = {h1.hex()}\n".encode())
writer.write(f"h2 = {h2.hex()}\n".encode())
if h1 == h2:
writer.write(f"\nCongrats!\n{FLAG}\n".encode())
else:
writer.write(b"\n:(\n")
await writer.drain()
async def handle_client_safely(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
peer = writer.get_extra_info("peername")
print("[+] New connection from", peer)
try:
await asyncio.wait_for(handle_client(reader, writer), TIMEOUT)
writer.close()
print("[+] Gracefully closed connection from", peer)
except ConnectionResetError:
print("[*] Connection reset by", peer)
except asyncio.exceptions.TimeoutError:
print("[*] Connection timed out", peer)
writer.close()
async def main():
server = await asyncio.start_server(handle_client_safely, HOST, PORT)
print("[+] Server started")
async with server:
await server.serve_forever()
if __name__ == "__main__":
asyncio.run(main())
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/CursedCTF/2024/Quals/crypto/viewing/source.py | ctfs/CursedCTF/2024/Quals/crypto/viewing/source.py | #!/usr/bin/env python3
from os import urandom
from secret import FLAG
PERM_1 = {0: 120, 1: 224, 2: 249, 3: 98, 4: 88, 5: 4, 6: 210, 7: 54, 8: 83, 9: 63, 10: 153, 11: 245, 12: 203, 13: 139, 14: 137, 15: 39, 16: 217, 17: 1, 18: 90, 19: 107, 20: 72, 21: 96, 22: 191, 23: 112, 24: 199, 25: 113, 26: 16, 27: 160, 28: 58, 29: 214, 30: 236, 31: 51, 32: 91, 33: 147, 34: 145, 35: 226, 36: 208, 37: 114, 38: 13, 39: 175, 40: 187, 41: 184, 42: 243, 43: 146, 44: 87, 45: 6, 46: 247, 47: 131, 48: 100, 49: 136, 50: 110, 51: 71, 52: 55, 53: 242, 54: 89, 55: 228, 56: 252, 57: 144, 58: 215, 59: 200, 60: 251, 61: 38, 62: 21, 63: 232, 64: 48, 65: 115, 66: 135, 67: 121, 68: 240, 69: 8, 70: 148, 71: 173, 72: 60, 73: 198, 74: 84, 75: 80, 76: 17, 77: 241, 78: 219, 79: 2, 80: 33, 81: 101, 82: 77, 83: 134, 84: 128, 85: 161, 86: 29, 87: 119, 88: 181, 89: 20, 90: 149, 91: 157, 92: 150, 93: 227, 94: 165, 95: 117, 96: 37, 97: 85, 98: 42, 99: 32, 100: 231, 101: 185, 102: 81, 103: 127, 104: 69, 105: 65, 106: 156, 107: 196, 108: 220, 109: 162, 110: 47, 111: 103, 112: 223, 113: 132, 114: 41, 115: 99, 116: 0, 117: 53, 118: 93, 119: 170, 120: 195, 121: 171, 122: 202, 123: 22, 124: 111, 125: 15, 126: 244, 127: 254, 128: 169, 129: 76, 130: 19, 131: 43, 132: 179, 133: 167, 134: 26, 135: 230, 136: 97, 137: 159, 138: 49, 139: 206, 140: 155, 141: 138, 142: 197, 143: 142, 144: 5, 145: 30, 146: 28, 147: 176, 148: 229, 149: 7, 150: 250, 151: 130, 152: 166, 153: 211, 154: 218, 155: 140, 156: 82, 157: 92, 158: 106, 159: 62, 160: 216, 161: 56, 162: 178, 163: 238, 164: 64, 165: 124, 166: 59, 167: 118, 168: 102, 169: 182, 170: 11, 171: 151, 172: 44, 173: 205, 174: 67, 175: 253, 176: 109, 177: 9, 178: 201, 179: 73, 180: 186, 181: 180, 182: 31, 183: 248, 184: 34, 185: 36, 186: 116, 187: 61, 188: 222, 189: 192, 190: 239, 191: 177, 192: 122, 193: 133, 194: 204, 195: 50, 196: 94, 197: 164, 198: 194, 199: 95, 200: 237, 201: 213, 202: 10, 203: 209, 204: 104, 205: 154, 206: 188, 207: 24, 208: 183, 209: 27, 210: 189, 211: 174, 212: 126, 213: 45, 214: 143, 215: 212, 216: 221, 217: 23, 218: 234, 
219: 123, 220: 193, 221: 158, 222: 255, 223: 141, 224: 163, 225: 75, 226: 18, 227: 70, 228: 3, 229: 105, 230: 246, 231: 35, 232: 152, 233: 86, 234: 233, 235: 108, 236: 40, 237: 79, 238: 74, 239: 172, 240: 125, 241: 207, 242: 78, 243: 46, 244: 14, 245: 235, 246: 168, 247: 25, 248: 190, 249: 66, 250: 225, 251: 12, 252: 129, 253: 52, 254: 57, 255: 68}
PERM_2 = {120: 0, 224: 1, 249: 2, 98: 3, 88: 4, 4: 5, 210: 6, 54: 7, 83: 8, 63: 9, 153: 10, 245: 11, 203: 12, 139: 13, 137: 14, 39: 15, 217: 16, 1: 17, 90: 18, 107: 19, 72: 20, 96: 21, 191: 22, 112: 23, 199: 24, 113: 25, 16: 26, 160: 27, 58: 28, 214: 29, 236: 30, 51: 31, 91: 32, 147: 33, 145: 34, 226: 35, 208: 36, 114: 37, 13: 38, 175: 39, 187: 40, 184: 41, 243: 42, 146: 43, 87: 44, 6: 45, 247: 46, 131: 47, 100: 48, 136: 49, 110: 50, 71: 51, 55: 52, 242: 53, 89: 54, 228: 55, 252: 56, 144: 57, 215: 58, 200: 59, 251: 60, 38: 61, 21: 62, 232: 63, 48: 64, 115: 65, 135: 66, 121: 67, 240: 68, 8: 69, 148: 70, 173: 71, 60: 72, 198: 73, 84: 74, 80: 75, 17: 76, 241: 77, 219: 78, 2: 79, 33: 80, 101: 81, 77: 82, 134: 83, 128: 84, 161: 85, 29: 86, 119: 87, 181: 88, 20: 89, 149: 90, 157: 91, 150: 92, 227: 93, 165: 94, 117: 95, 37: 96, 85: 97, 42: 98, 32: 99, 231: 100, 185: 101, 81: 102, 127: 103, 69: 104, 65: 105, 156: 106, 196: 107, 220: 108, 162: 109, 47: 110, 103: 111, 223: 112, 132: 113, 41: 114, 99: 115, 0: 116, 53: 117, 93: 118, 170: 119, 195: 120, 171: 121, 202: 122, 22: 123, 111: 124, 15: 125, 244: 126, 254: 127, 169: 128, 76: 129, 19: 130, 43: 131, 179: 132, 167: 133, 26: 134, 230: 135, 97: 136, 159: 137, 49: 138, 206: 139, 155: 140, 138: 141, 197: 142, 142: 143, 5: 144, 30: 145, 28: 146, 176: 147, 229: 148, 7: 149, 250: 150, 130: 151, 166: 152, 211: 153, 218: 154, 140: 155, 82: 156, 92: 157, 106: 158, 62: 159, 216: 160, 56: 161, 178: 162, 238: 163, 64: 164, 124: 165, 59: 166, 118: 167, 102: 168, 182: 169, 11: 170, 151: 171, 44: 172, 205: 173, 67: 174, 253: 175, 109: 176, 9: 177, 201: 178, 73: 179, 186: 180, 180: 181, 31: 182, 248: 183, 34: 184, 36: 185, 116: 186, 61: 187, 222: 188, 192: 189, 239: 190, 177: 191, 122: 192, 133: 193, 204: 194, 50: 195, 94: 196, 164: 197, 194: 198, 95: 199, 237: 200, 213: 201, 10: 202, 209: 203, 104: 204, 154: 205, 188: 206, 24: 207, 183: 208, 27: 209, 189: 210, 174: 211, 126: 212, 45: 213, 143: 214, 212: 215, 221: 216, 23: 217, 234: 218, 
123: 219, 193: 220, 158: 221, 255: 222, 141: 223, 163: 224, 75: 225, 18: 226, 70: 227, 3: 228, 105: 229, 246: 230, 35: 231, 152: 232, 86: 233, 233: 234, 108: 235, 40: 236, 79: 237, 74: 238, 172: 239, 125: 240, 207: 241, 78: 242, 46: 243, 14: 244, 235: 245, 168: 246, 25: 247, 190: 248, 66: 249, 225: 250, 12: 251, 129: 252, 52: 253, 57: 254, 68: 255}
def bxor(a, b): return bytes(x ^ y for x, y in zip(a, b))
KEYS = [urandom(len(FLAG)) for _ in range(32)]
for i, k in enumerate(KEYS):
with open(f'key_{i}_{urandom(16).hex()}.txt', 'w') as f:
f.write(k.hex())
ct = FLAG
for k in KEYS:
roundkey = k
for round in range(35):
ct = bxor(ct, roundkey)
roundkey = bytes(PERM_1[x] for x in roundkey)
roundkey = bytes(PERM_2[x] for x in roundkey)
with open('out.txt', 'w') as f:
f.write(ct.hex())
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
sajjadium/ctf-archives | https://github.com/sajjadium/ctf-archives/blob/129a3a9fe604443211fa4d493a49630c30689df7/ctfs/CPCTF/2025/crypto/Prime_Tester/chal.py | ctfs/CPCTF/2025/crypto/Prime_Tester/chal.py | from math import gcd
def is_prime(n):
if n == 2:
return True
if n == 1 or n & 1 == 0:
return False
d = n - 1
while d & 1 == 0:
d >>= 1
for a in range(500):
if gcd(a, n) != 1:
continue
t = d
y = pow(a, t, n)
while t != n - 1 and y != 1 and y != n - 1:
y = (y * y) % n
t <<= 1
if y != n - 1 and t & 1 == 0:
return False
return True
if __name__ == '__main__':
n = int(input("What is your favorite prime number?: "))
if n <= 2 or 4096 <= n.bit_length():
print("Hmm... I don't like this.")
exit(0)
if not is_prime(n):
print(":(")
exit(0)
x = int(input("What is your favorite number?: "))
if x <= 1 or x >= n - 1:
print(":(")
exit(0)
if pow(x, 2, n) == x:
print("Wow! How did you do that?")
with open("flag.txt") as f:
print(f.read())
else:
print("Nice!")
exit(0)
| python | MIT | 129a3a9fe604443211fa4d493a49630c30689df7 | 2026-01-05T01:34:13.869332Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/.github/scripts/get_release_props.py | .github/scripts/get_release_props.py | # /// script
# requires-python = ">=3.12"
# dependencies = [
# "pyyaml",
# "click",
# ]
# ///
"""Extract the properties of a configured EEST release from a YAML file."""
import sys
import click
import yaml
RELEASE_PROPS_FILE = "./.github/configs/feature.yaml"
@click.command()
@click.argument("release", required=True)
def get_release_props(release: str) -> None:
"""Extract the properties from the YAML file for a given release."""
with open(RELEASE_PROPS_FILE) as f:
data = yaml.safe_load(f)
if release not in data:
print(f"Error: Release {release} not found in {RELEASE_PROPS_FILE}.")
sys.exit(1)
print("\n".join(f"{key}={value}" for key, value in data[release].items()))
if __name__ == "__main__":
get_release_props()
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/.github/scripts/generate_eip_report.py | .github/scripts/generate_eip_report.py | """
Generate a markdown report of outdated EIP references from the EIP version
checker output.
"""
import os
import re
import sys
import textwrap
from string import Template
from typing import List, Tuple
# Report template using textwrap.dedent for clean multiline strings
REPORT_TEMPLATE = Template(
textwrap.dedent("""\
# EIP Version Check Report
This automated check has detected that some EIP references in test files are outdated. This means that the EIPs have been updated in the [ethereum/EIPs](https://github.com/ethereum/EIPs) repository since our tests were last updated.
## Outdated EIP References
### Summary Table
| File | EIP Link | Referenced Version | Latest Version |
| ---- | -------- | ------------------ | -------------- |
$summary_table
### Verbatim Failures
```
$fail_messages
```
### Verbatim Errors
```
$error_messages
```
## Action Required
1. Please verify whether the affected tests need updating based on changes in the EIP spec.
2. Update the `REFERENCE_SPEC_VERSION` in each file with the latest version shown above.
3. For detailed instructions, see the [reference specification documentation](https://eest.ethereum.org/main/writing_tests/reference_specification/).
## Workflow Information
For more details, see the [workflow run](https://github.com/ethereum/execution-spec-tests/actions/runs/$run_id).
""") # noqa: E501
)
def extract_failures(output: str) -> List[Tuple[str, str, str, str, str, str]]:
"""Extract failure information from the output using regex."""
failures = []
for line in output.split("\n"):
if not line.startswith("FAILED"):
continue
# Extract test file path
file_match = re.search(r"FAILED (tests/[^:]+\.py)", line)
if not file_match:
continue
file_path = file_match.group(1)
# Extract EIP number
eip_match = re.search(r"eip(\d+)", file_path, re.IGNORECASE)
eip_num = f"EIP-{eip_match.group(1)}" if eip_match else "Unknown"
# Extract full path
full_path_match = re.search(r"from '([^']+)'", line)
full_path = full_path_match.group(1) if full_path_match else "Unknown"
# Extract EIP link
eip_link_match = re.search(r"Spec: (https://[^ ]+)\.", line)
eip_link = eip_link_match.group(1) if eip_link_match else ""
eip_link = eip_link.replace("blob/", "commits/") if eip_link else ""
# Extract versions
ref_version_match = re.search(r"Referenced version: ([a-f0-9]+)", line)
ref_version = ref_version_match.group(1) if ref_version_match else "Unknown"
latest_version_match = re.search(r"Latest version: ([a-f0-9]+)", line)
latest_version = latest_version_match.group(1) if latest_version_match else "Unknown"
failures.append((file_path, eip_num, full_path, eip_link, ref_version, latest_version))
return failures
def generate_summary_table(failures: List[Tuple[str, str, str, str, str, str]]) -> str:
"""Generate a markdown summary table from the failures."""
rows = []
for file_path, eip_num, _, eip_link, ref_version, latest_version in failures:
rows.append(
f"| `{file_path}` | [{eip_num}]({eip_link}) | `{ref_version}` | `{latest_version}` |"
)
return "\n".join(rows)
def main() -> None:
"""Generate the report."""
if len(sys.argv) < 2:
print("Usage: uv run python generate_eip_report.py <input_file> [output_file]")
sys.exit(1)
input_file = sys.argv[1]
output_file = sys.argv[2] if len(sys.argv) > 2 else "./reports/outdated_eips.md"
try:
with open(input_file, "r") as f:
output = f.read()
except Exception as e:
print(f"Error reading input file: {e}")
sys.exit(1)
failures = extract_failures(output)
fail_messages = "\n".join(line for line in output.split("\n") if line.startswith("FAILED"))
if not fail_messages:
fail_messages = (
"No test failures were found in the pytest output: No lines start with 'FAILED'."
)
error_messages = "\n".join(line for line in output.split("\n") if line.startswith("ERROR"))
if not error_messages:
error_messages = (
"No test errors were found in the pytest output: No lines start with 'ERROR'."
)
report_content = REPORT_TEMPLATE.substitute(
summary_table=generate_summary_table(failures),
fail_messages=fail_messages,
error_messages=error_messages,
run_id=os.environ.get("GITHUB_RUN_ID", ""),
)
try:
with open(output_file, "w") as report:
report.write(report_content)
except Exception as e:
print(f"Error writing output file: {e}")
sys.exit(1)
print(f"Report generated successfully: {output_file}")
print(f"Found {len(failures)} outdated EIP references")
if __name__ == "__main__":
main()
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/scripts/convert_addresses.py | scripts/convert_addresses.py | #!/usr/bin/env python3
"""
Simple address converter for static test fillers.
Two-pass approach:
1. Collect all addresses and create mappings
2. Replace all occurrences with tags.
"""
import argparse
import re
from enum import Enum, auto
from pathlib import Path
from typing import Dict, List, Optional, Set
from ethereum_test_forks import Prague
class Section(Enum):
"""Represents the current section being parsed."""
NONE = auto()
PRE = auto()
ENV = auto()
TRANSACTION = auto()
RESULT = auto()
EXPECT = auto()
class Context(Enum):
    """Sub-context within a section (see detect_context_change)."""
    NORMAL = auto()  # plain key/value fields (balance:, nonce:, ...)
    CODE = auto()  # inside a code:/data:/raw: value
    STORAGE = auto()  # inside a non-empty storage: mapping
# Well-known test secret key and the sender address derived from it.
KNOWN_SECRET_KEY = "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8"
KNOWN_SENDER_ADDRESS = "a94f5374fce5edbc8e2a8697c15331677e6ebf0b"
# Don't convert the default coinbase since this is the same one used in python tests.
# TODO: check if coinbase is affected in `result` section. If so, we need to tag it
# to generate a dynamic address for it in both the `result` and `currentCoinbase` sections
CONVERT_COINBASE = False
# Hex strings of all precompile addresses active in the Prague fork.
PRECOMPILE_ADDRESSES = {pre.hex() for pre in Prague.precompiles()}
# Filler files whose 40-hex-char strings are not real addresses (or are not yet
# verified); is_incompatible_file() skips any file whose name appears here.
# TODO: check these manually for false positives
# callToSuicideThenExtcodehashFiller.json -- hard-coded 000...00025
FALSE_POSITIVE_TESTS = {
    # possible false positives to check
    "staticcall_createfailsFiller.json",
    "createInitFail_OOGduringInit2Filler.json",
    "createInitFail_OOGduringInitFiller.json",
    "createNameRegistratorPreStore1NotEnoughGasFiller.json",
    # definite false positives
    "codesizeOOGInvalidSizeFiller.json",
    # "contractCreationOOGdontLeaveEmptyContractFiller.json", # Temporarily enabling for testing
    "contractCreationOOGdontLeaveEmptyContractViaTransactionFiller.json",
    "createContractViaContractFiller.json",
    "createContractViaContractOOGInitCodeFiller.json",
    "createContractViaTransactionCost53000Filler.json",
}
# Path patterns for incompatible tests - can include full filenames or directory patterns
INCOMPATIBLE_PATH_PATTERNS = {
# Exact filenames (existing)
"push0Filler.yml",
"push0Gas2Filler.yml",
"ContractCreationSpamFiller.json",
"create2InitCodeSizeLimitFiller.yml",
"createInitCodeSizeLimitFiller.yml",
"creationTxInitCodeSizeLimitFiller.yml",
"suicideNonConstFiller.yml",
"createNonConstFiller.yml",
"CrashingTransactionFiller.json",
"measureGasFiller.yml",
"operationDiffGasFiller.yml",
"callcodeDynamicCodeFiller.json",
"callcodeDynamicCode2SelfCallFiller.json",
"callcodeInInitcodeToEmptyContractFiller.json",
"callcodeInInitcodeToExistingContractFiller.json",
"callcodeInInitcodeToExisContractWithVTransferNEMoneyFiller.json",
"callcodeInInitcodeToExistingContractWithValueTransferFiller.json",
"contractCreationMakeCallThatAskMoreGasThenTransactionProvidedFiller.json",
"codesizeInitFiller.json",
"codesizeValidFiller.json",
"create2CodeSizeLimitFiller.yml",
"createCodeSizeLimitFiller.yml",
"createFailBalanceTooLowFiller.json",
"createInitOOGforCREATEFiller.json",
"createJS_NoCollisionFiller.json",
"createNameRegistratorPerTxsFiller.json",
"createNameRegistratorPerTxsNotEnoughGasFiller.json",
"undefinedOpcodeFirstByteFiller.yml",
"block504980Filler.json",
"static_CallEcrecover0Filler.json",
"static_CallEcrecover0_completeReturnValueFiller.json",
"static_CallEcrecover0_gas3000Filler.json",
"static_CallEcrecover0_overlappingInputOutputFiller.json",
"static_CallEcrecoverCheckLengthFiller.json",
"static_CallEcrecover0_prefixed0Filler.json",
"static_contractCreationMakeCallThatAskMoreGasThenTransactionProvidedFiller.json",
"static_contractCreationOOGdontLeaveEmptyContractViaTransactionFiller.json",
"callcodecallcallcode_101_SuicideMiddleFiller.json",
"static_callcodecallcallcode_101_OOGMAfter2Filler.json",
"delegatecallInInitcodeToEmptyContractFiller.json",
"delegatecallInInitcodeToExistingContractFiller.json",
"delegatecallInInitcodeToExistingContractOOGFiller.json",
"delegatecodeDynamicCode2SelfCallFiller.json",
"delegatecodeDynamicCodeFiller.json",
"RawCreateFailGasValueTransferFiller.json",
"RawCreateFailGasValueTransfer2Filler.json",
"ExtCodeCopyTestsParisFiller.json",
"EXTCODESIZE_toNonExistentFiller.json",
"CreateAndGasInsideCreateFiller.json",
"RawCreateGasFiller.json",
"RawCreateGasMemoryFiller.json",
"RawCreateGasValueTransferFiller.json",
"RawCreateGasValueTransferMemoryFiller.json",
"Transaction64Rule_integerBoundariesFiller.yml",
"addressOpcodesFiller.yml",
"baseFeeDiffPlacesFiller.yml",
"eip2929-ffFiller.yml",
"eip2929Filler.yml",
"eip2929OOGFiller.yml",
"gasCostJumpFiller.yml",
"gasCostMemoryFiller.yml",
"gasPriceDiffPlacesFiller.yml",
"initCollidingWithNonEmptyAccountFiller.yml",
"manualCreateFiller.yml",
"storageCostsFiller.yml",
"variedContextFiller.yml",
"vitalikTransactionTestParisFiller.json",
# stExample
"add11_ymlFiller.yml",
"add11Filler.json",
# stExtCodeHash
"extcodehashEmpty_ParisFiller.yml",
"extCodeHashSelfInInitFiller.json",
"extCodeHashSubcallSuicideCancunFiller.yml",
"extCodeHashNewAccountFiller.json",
"extCodeHashDeletedAccount1CancunFiller.yml",
"extCodeHashDeletedAccount2CancunFiller.yml",
"extCodeHashDeletedAccount3Filler.yml",
"extCodeHashDeletedAccount4Filler.yml",
"extCodeHashCreatedAndDeletedAccountStaticCallFiller.json",
"extCodeHashCreatedAndDeletedAccountRecheckInOuterCallFiller.json",
"dynamicAccountOverwriteEmpty_ParisFiller.yml",
"extCodeHashDeletedAccountCancunFiller.yml",
"extCodeHashDeletedAccountFiller.yml",
"extCodeHashSubcallSuicideFiller.yml",
"CreateAndGasInsideCreateWithMemExpandingCallsFiller.json",
"codeCopyZero_ParisFiller.yml",
"extCodeHashInInitCodeFiller.json",
"extCodeHashSelfFiller.json",
"extCodeHashNonExistingAccountFiller.yml",
"extCodeHashSubcallOOGFiller.yml",
"extCodeHashPrecompilesFiller.yml",
"contractCreationOOGdontLeaveEmptyContractFiller.json",
"extCodeHashCreatedAndDeletedAccountCallFiller.json",
"createEmptyThenExtcodehashFiller.json",
"contractCreationOOGdontLeaveEmptyContractViaTransactionFiller.json",
# Really only `ReturnTestFiller` and `ReturnTest2Filler` are compatible inside `stInitCodeTest`
"CallContractToCreateContractAndCallItOOGFiller.json",
"CallContractToCreateContractOOGBonusGasFiller.json",
"CallContractToCreateContractWhichWouldCreateContractIfCalledFiller.json",
"CallContractToCreateContractWhichWouldCreateContractInInitCodeFiller.json",
"CallRecursiveContractFiller.json",
"StackUnderFlowContractCreationFiller.json",
"TransactionCreateRandomInitCodeFiller.json",
"TransactionCreateAutoSuicideContractFiller.json",
"TransactionCreateSuicideInInitcodeFiller.json",
"CallTheContractToCreateEmptyContractFiller.json",
"OutOfGasContractCreationFiller.json",
"OutOfGasPrefundedContractCreationFiller.json",
"TransactionCreateStopInInitcodeFiller.json",
# stInitCodeTest
"ReturnTestFiller.json",
"ReturnTest2Filler.json",
"CREATE_Bounds2Filler.json",
"CREATE_Bounds3Filler.json",
"CREATE_BoundsFiller.json",
"NonZeroValue_CALLCODEFiller.json",
"NonZeroValue_DELEGATECALLFiller.json",
"bufferFiller.yml",
"bufferSrcOffsetFiller.yml",
"callDataCopyOffsetFiller.json",
"oogFiller.yml",
"CALLCODEEcrecover0Filler.json",
"CALLCODEEcrecoverV_prefixed0Filler.json",
"CALLCODEEcrecover0_completeReturnValueFiller.json",
"CALLCODEEcrecover0_overlappingInputOutputFiller.json",
"CALLCODEEcrecover0_gas3000Filler.json",
"CallEcrecover0_completeReturnValueFiller.json",
"CallEcrecoverCheckLengthFiller.json",
"CallEcrecover0_gas3000Filler.json",
"CallEcrecover0Filler.json",
"CallEcrecover0_overlappingInputOutputFiller.json",
"CallEcrecoverV_prefixed0Filler.json",
"ecrecoverShortBuffFiller.yml",
"modexp_0_0_0_20500Filler.json",
"modexp_0_0_0_22000Filler.json",
"modexp_0_0_0_25000Filler.json",
"modexp_0_0_0_35000Filler.json",
"Create1000ShnghaiFiller.json",
"QuadraticComplexitySolidity_CallDataCopyFiller.json",
"testRandomTestFiller.json",
# uses coinbase address (0x41) as integer memory offsets. TODO: is this necessary for the test?
# we could end up changing this for compatibility instead.
"randomStatetest1Filler.json",
"randomStatetest173Filler.json",
"randomStatetest107Filler.json",
"randomStatetest137Filler.json",
"randomStatetest246Filler.json",
"randomStatetest263Filler.json",
"randomStatetest267Filler.json",
"randomStatetest308Filler.json",
"randomStatetest367Filler.json",
"randomStatetest41Filler.json",
"randomStatetest372Filler.json",
"randomStatetest64Filler.json",
"randomStatetest368Filler.json",
"randomStatetest362Filler.json",
"randomStatetest80Filler.json",
"randomStatetest73Filler.json",
"randomStatetest66Filler.json",
"randomStatetest406Filler.json",
"randomStatetest388Filler.json",
"randomStatetest437Filler.json",
"randomStatetest473Filler.json",
"randomStatetest502Filler.json",
"randomStatetest526Filler.json",
"randomStatetest545Filler.json",
"randomStatetest537Filler.json",
"randomStatetest564Filler.json",
"randomStatetest307Filler.json",
"randomStatetest646Filler.json",
"Call50000_sha256Filler.json",
"Create1000ByzantiumFiller.json",
"Create1000Filler.json",
# stRecursiveCreate
"RecursiveCreateContractsCreate4ContractsFiller.json",
"RecursiveCreateContractsFiller.json",
# stStackTests
"shallowStackFiller.json",
"stackOverflowFiller.json",
"stackOverflowDUPFiller.json",
"stackOverflowPUSHFiller.json",
"revertRetDataSizeFiller.yml",
"returndatacopy_0_0_following_successful_createFiller.json",
"RevertPrefoundFiller.json",
"RevertPrefoundEmpty_ParisFiller.json",
"RevertDepthCreateOOGFiller.json",
"costRevertFiller.yml",
"LoopCallsDepthThenRevert2Filler.json",
"LoopCallsDepthThenRevert3Filler.json",
"CreateContractFromMethodFiller.json",
"RevertOpcodeCreateFiller.json",
"RevertOpcodeInInitFiller.json",
"RevertOpcodeWithBigOutputInInitFiller.json",
"ByZeroFiller.json",
"TestCryptographicFunctionsFiller.json",
"StackDepthLimitSECFiller.json",
"eoaEmptyParisFiller.yml",
"deploymentErrorFiller.json",
"sstore_0to", # Note: many tests match this pattern
"sstore_Xto", # Note: many tests match this pattern
"sstore_changeFromExternalCallInInitCodeFiller.json",
"stackOverflowM1DUPFiller.json",
"stackOverflowM1Filler.json",
"stackOverflowM1PUSHFiller.json",
"stackOverflowSWAPFiller.json",
"stacksanitySWAPFiller.json",
# TODO: See if any of these can be turned on with fine tuning
"static_ABAcallsSuicide1Filler.json",
"static_CREATE_ContractSuicideDuringInitFiller.json",
"static_CREATE_ContractSuicideDuringInit_ThenStoreThenReturnFiller.json",
"static_CREATE_ContractSuicideDuringInit_WithValueFiller.json",
"static_ZeroValue_CALL_OOGRevertFiller.json",
"static_CREATE_EmptyContractAndCallIt_0weiFiller.json",
"static_CREATE_EmptyContractWithStorageAndCallIt_0weiFiller.json",
"static_RawCallGasAskFiller.json",
"static_InternalCallStoreClearsOOGFiller.json",
"StaticcallToPrecompileFromCalledContractFiller.yml",
"static_callCreate2Filler.json",
"static_callCreate3Filler.json",
"static_CallContractToCreateContractOOGFiller.json",
"StaticcallToPrecompileFromContractInitializationFiller.yml",
"StaticcallToPrecompileFromTransactionFiller.yml",
"CallWithNOTZeroValueToPrecompileFromContractInitializationFiller.yml",
"CallWithZeroValueToPrecompileFromContractInitializationFiller.yml",
"CallWithZeroValueToPrecompileFromTransactionFiller.yml",
"CallWithZeroValueToPrecompileFromCalledContractFiller.yml",
"DelegatecallToPrecompileFromCalledContractFiller.yml",
"CallcodeToPrecompileFromContractInitializationFiller.yml",
"CallcodeToPrecompileFromCalledContractFiller.yml",
"CallcodeToPrecompileFromTransactionFiller.yml",
"DelegatecallToPrecompileFromContractInitializationFiller.yml",
"DelegatecallToPrecompileFromTransactionFiller.yml",
"CreateHashCollisionFiller.json",
"createNameRegistratorZeroMem2Filler.json",
"createNameRegistratorZeroMemFiller.json",
"doubleSelfdestructTestFiller.yml",
"createNameRegistratorZeroMemExpansionFiller.json",
"createNameRegistratorFiller.json",
"multiSelfdestructFiller.yml",
"suicideCallerAddresTooBigRightFiller.json",
"createNameRegistratorPerTxs", # Note: many tests match this pattern
"addmodFiller.yml",
"addFiller.yml",
"divFiller.yml",
"expFiller.yml",
"modFiller.yml",
"mulmodFiller.yml",
"sdivFiller.yml",
"signextendFiller.yml",
"mulFiller.yml",
"notFiller.yml",
"subFiller.yml",
"smodFiller.yml",
"byteFiller.yml",
"iszeroFiller.yml",
"eqFiller.yml",
"ltFiller.yml",
"sgtFiller.yml",
"sltFiller.yml",
"gtFiller.yml",
"xorFiller.yml",
"orFiller.yml",
"andFiller.yml",
"codecopyFiller.yml",
"jumpFiller.yml",
"gasFiller.yml",
"jumpToPushFiller.yml",
"jumpiFiller.yml",
"loopsConditionalsFiller.yml",
"msizeFiller.yml",
"mstoreFiller.yml",
"mstore8Filler.yml",
"mloadFiller.yml",
"sstore_sloadFiller.yml",
"returnFiller.yml",
"pcFiller.yml",
"popFiller.yml",
"log0Filler.yml",
"log1Filler.yml",
"log2Filler.yml",
"log3Filler.yml",
"log4Filler.yml",
"blockInfoFiller.yml",
"envInfoFiller.yml",
"sha3Filler.yml",
"suicideFiller.yml",
"swapFiller.yml",
"Opcodes_TransactionInitFiller.json",
"CreateMessageSuccessFiller.json",
"CreateTransactionSuccessFiller.json",
"EmptyTransaction3Filler.json",
"SuicidesAndInternalCallSuicidesBonusGasAtCallFiller.json",
"SuicidesStopAfterSuicideFiller.json",
"SuicidesAndInternalCallSuicidesBonusGasAtCallFailedFiller.json",
"SuicidesAndInternalCallSuicidesSuccessFiller.json",
"StoreGasOnCreateFiller.json",
"TransactionSendingToEmptyFiller.json",
"ZeroValue_DELEGATECALLFiller.json",
"ZeroValue_SUICIDEFiller.json",
"ZeroValue_TransactionCALLFiller.json",
"ZeroValue_CALLCODE_OOGRevertFiller.json",
"ZeroValue_TransactionCALLwithDataFiller.json",
"ZeroValue_CALL_OOGRevertFiller.json",
"ZeroValue_DELEGATECALL_OOGRevertFiller.json",
"ZeroValue_CALLCODEFiller.json",
"ZeroValue_CALLFiller.json",
"createNameRegistratorValueTooHighFiller.json",
"suicideCallerAddresTooBigLeftFiller.json",
"ABAcallsSuicide1Filler.json",
"/stCreate2/",
"/stCreateTest/",
"/stRecursiveCreate/",
"/stWalletTest/",
"/stZeroKnowledge/",
"/stZeroKnowledge2/",
# TODO: See if these can be turned on with fine tuning
"/stTimeConsuming/",
}
def is_incompatible_file(file_path: Path) -> bool:
    """Check if a file should be skipped based on filename or path patterns."""
    path_text = str(file_path)
    name = file_path.name
    # Known false-positive fillers are always skipped.
    if name in FALSE_POSITIVE_TESTS:
        return True
    # Patterns ending in "/" are directory patterns and match anywhere in the
    # path; every other pattern matches the filename exactly or occurs
    # anywhere in the path string.
    return any(
        pattern in path_text if pattern.endswith("/") else (pattern == name or pattern in path_text)
        for pattern in INCOMPATIBLE_PATH_PATTERNS
    )
DO_NOT_TAG_ADDRESSES = {
"transStorageResetFiller.yml": {"000000000000000000000000000000003f8390d5"},
"transStorageOKFiller.yml": {
"000000000000000000000000000000005d7935df",
"00000000000000000000000000000000000057a7",
"00000000000000000000000000000000c54b5829",
"000000000000000000000000000000007f9317bd",
"000000000000000000000000000000000000add1",
"000000000000000000000000000000007074a486",
"00000000000000000000000000000000264bb86a",
"000000000000000000000000000000005114e2c8",
"00000000000000000000000000000000ca11bacc",
"00000000000000000000000000000000c1c922f1",
"000000000000000000000000000000006e3a7204",
"00000000000000000000000000000000ebd141d5",
},
"invalidAddrFiller.yml": {
"0000000000000000000000000000000000dead01",
"0000000000000000000000000000000000dead02",
},
"OOGinReturnFiller.yml": {
"ccccccccccccccccccccccccccccccccccccccc1",
"ccccccccccccccccccccccccccccccccccccccc2",
},
"callToSuicideThenExtcodehashFiller.json": {"0000000000000000000000000000000000000025"},
"doubleSelfdestructTouch_ParisFiller.yml": {
"0000000000000000000000000000000000e49701",
"0000000000000000000000000000000000e49702",
},
}
SHORT_NAME_FILLERS = {
"transStorageResetFiller.yml",
"invalidAddrFiller.yml",
"precompsEIP2929CancunFiller.yml",
"addressOpcodesFiller.yml",
"coinbaseT01Filler.yml",
"coinbaseT2Filler.yml",
"doubleSelfdestructTouch_ParisFiller.yml",
"tooLongReturnDataCopyFiller.yml",
"coinbaseWarmAccountCallGasFailFiller.yml",
}
# Fillers that should have precompile check disabled
# These tests intentionally use addresses that map to precompiles
DISABLE_PRECOMPILE_CHECK_FILLERS = { # type: ignore[var-annotated]
# "block504980Filler.json",
}
# Fillers where addresses are tagged in pre/result sections only
# No address replacement in code or storage values
NO_TAGS_IN_CODE = {
# Add fillers here that should not have addresses replaced in code/storage
"modexpFiller.json",
"returndatacopyPythonBug_Tue_03_48_41-1432Filler.json",
"/stZeroKnowledge",
"modexp_modsize0_returndatasizeFiller.json",
"RevertPrecompiledTouchExactOOG_ParisFiller.json",
"randomStatetest51Filler.json", # address is used as integer value
"randomStatetest565Filler.json", # address is used as integer value
"randomStatetest601Filler.json", # address is used as integer value
}
# Fillers that should skip entropy validation entirely
# These tests are deemed safe to convert addresses even if they don't pass entropy check
VALIDATE_ADDR_ENTROPY_IN_CODE = { # type: ignore[var-annotated]
# Add more fillers here that should skip entropy validation
# "static_log_CallerFiller.json",
# "OOGMAfterFiller.json",
# "OOGMAfter2Filler.json",
# "OOGMAfter_2Filler.json",
# "OOGMAfter_3Filler.json",
# "OOGMAfterFiller.json",
# "OOGMAfter2Filler.json",
# "OOGMBeforeFiller.json",
# "SuicideEndFiller.json",
# "SuicideEnd2Filler.json",
# "SuicideMiddleFiller.json",
# "OOGEFiller.json",
# "static_CheckOpcodesFiller.json",
# "static_CheckOpcodes2Filler.json",
# "static_CheckOpcodes3Filler.json",
# "static_CheckOpcodes4Filler.json",
# "static_CheckOpcodes5Filler.json",
}
# Fillers that should convert addresses with shouldnotexist in result section
# These tests use CREATE/CREATE2 and need shouldnotexist addresses to be converted
SHOULDNOTEXIST_NO_CONVERSION_FILLERS = {
# Add more fillers here as needed
"callToSuicideThenExtcodehashFiller.json",
}
# Fillers where addresses in storage keys should be replaced with tags
# By default, storage keys are not replaced to avoid issues
DONT_REPLACE_TAGS_IN_STORAGE_KEYS = { # type: ignore[var-annotated]
# Add fillers here that should have addresses replaced in storage keys
# Example: tests that use addresses as storage keys intentionally
# "randomStatetest383Filler.json",
}
def normalize_address(addr: str) -> str:
    """Normalize an address: strip whitespace/quotes and any 0x prefix, lowercase it."""
    if not addr:
        return addr
    cleaned = addr.strip().strip("\"'")
    # Drop a hex prefix in either capitalization.
    if cleaned.startswith(("0x", "0X")):
        cleaned = cleaned[2:]
    return cleaned.lower()
def calculate_entropy(addr: str) -> float:
    """
    Score an address 0.0-1.0 so callers can decide whether to tag it.

    0.0 means not a 40-char address; 0.1 marks "mathematical" addresses
    (mostly zeros, or a single repeated digit) that are likely literal
    numbers; 0.8 marks repeated-letter test addresses; 0.5 is the default
    ("replace it").
    """
    if not addr or len(addr) != 40:
        return 0.0
    distinct = set(addr)
    # Mostly zeros with almost no variety: looks like 0x1000...0, a number.
    if len(distinct) <= 3 and addr.count("0") >= 35:
        return 0.1
    if len(distinct) == 1:
        # One repeated digit is a numeric value; one repeated letter
        # (e.g. 0xcccc...) is clearly a hand-written test address.
        return 0.1 if addr[0] in "0123456789" else 0.8
    return 0.5
class SimpleAddressConverter:
"""Simple two-pass converter."""
def __init__(self, filename: str = ""): # noqa: D107
self.filename = filename
self.address_mappings: Dict[str, str] = {} # addr -> tag
self.pre_addresses: Set[str] = set() # addresses from pre section
self.addresses_with_code: Set[str] = set()
self.creation_addresses: Set[str] = (
set()
) # addresses from result section with shouldnotexist
self.coinbase_addr: Optional[str] = None
self.target_addr: Optional[str] = None
self.is_json = filename.lower().endswith(".json")
self.skip_precompile_check = any(kw in filename for kw in DISABLE_PRECOMPILE_CHECK_FILLERS)
self.no_tags_in_code = any(kw in filename for kw in NO_TAGS_IN_CODE)
self.validate_addr_entropy_in_code = any(
kw in filename for kw in VALIDATE_ADDR_ENTROPY_IN_CODE
)
self.dont_convert_shouldnotexist = any(
kw in filename for kw in SHOULDNOTEXIST_NO_CONVERSION_FILLERS
)
self.dont_replace_tags_in_storage_keys = any(
kw in filename for kw in DONT_REPLACE_TAGS_IN_STORAGE_KEYS
)
# Get addresses that should not be tagged for this specific test file
self.do_not_tag_addresses: Set[str] = set()
if filename in DO_NOT_TAG_ADDRESSES:
self.do_not_tag_addresses = DO_NOT_TAG_ADDRESSES[filename]
# Check if this is a short name filler
self.is_short_name_filler = any(kw in filename for kw in SHORT_NAME_FILLERS)
self.short_name_mappings: Dict[str, str] = {} # short_name -> tag
def detect_section(self, line: str) -> Optional[Section]:
"""Detect which section a line indicates."""
stripped = line.strip().replace('"', "").replace("'", "").replace(" ", "")
if "pre:" in stripped:
return Section.PRE
elif "env:" in stripped:
return Section.ENV
elif "transaction:" in stripped:
return Section.TRANSACTION
elif "result:" in stripped:
return Section.RESULT
elif "expect:" in stripped or "_info:" in stripped:
return Section.EXPECT
return None
def detect_context_change(self, line: str) -> Optional[Context]:
"""Detect if line changes the context."""
stripped = line.strip().replace('"', "").replace("'", "").replace(" ", "")
# Check for context-changing keywords
if any(kw in stripped for kw in {"code:", "data:", "raw:"}):
return Context.CODE
elif "storage:" in stripped:
# If storage is followed by {}, it's an empty storage and context should stay NORMAL
if "storage:{}" in stripped or re.search(r"storage:\s*\{\s*\}", line):
return None # Don't change context for empty storage
return Context.STORAGE
# Known fields that indicate we're back in NORMAL context
elif any(
kw in stripped
for kw in {
"balance:",
"nonce:",
"secretKey:",
"gasLimit:",
"gasPrice:",
"value:",
"to:",
"from:",
"address:",
"shouldnotexist:",
"indexes:",
"network:",
"result:",
}
):
return Context.NORMAL
return None
def create_short_name(self, addr: str) -> Optional[str]:
"""
Create a short name from an address by stripping leading/trailing zeros.
Args:
addr: Normalized address (40 chars, lowercase, no 0x prefix)
Returns:
Short name with 0x prefix, or None if the address is all zeros or too short
"""
if not addr or len(addr) != 40:
return None
# Strip leading zeros
lstripped = addr.lstrip("0")
# If nothing left after stripping leading zeros, it's all zeros
if not lstripped:
return None
# Strip trailing zeros
stripped = lstripped.rstrip("0")
# If nothing left after stripping trailing zeros, or too short, return None
if not stripped or len(stripped) < 2:
if len(lstripped) >= 2:
return f"0x{lstripped}"
return None
return f"0x{stripped}"
def collect_addresses(self, lines: List[str]) -> None:
"""First pass: collect all addresses from the file (unified for JSON and YAML)."""
current_section = Section.NONE
current_context = Context.NORMAL
current_address = None
current_result_address = None
looking_for_shouldnotexist = False
for _, line in enumerate(lines, 1):
stripped = line.strip()
stripped_no_spaces_or_quotes = (
stripped.replace('"', "").replace("'", "").replace(" ", "").replace(",", "")
)
# Check for section changes
new_section = self.detect_section(line)
if new_section:
current_section = new_section
current_context = Context.NORMAL
current_address = None
current_result_address = None
looking_for_shouldnotexist = False
# Reset context when we see a new address key in pre/result sections
# This handles YAML structure where addresses are top-level keys
# Do this FIRST before any other context checks
if current_section in [Section.PRE, Section.RESULT]:
# Check if this line is an address key (40 hex chars followed by colon)
# Also check for quoted addresses in JSON format
if re.match(r"^\s*(?:0x)?[a-fA-F0-9]{40}\s*:", line, re.IGNORECASE) or re.match(
r'^\s*"(?:0x)?[a-fA-F0-9]{40}"\s*:', line, re.IGNORECASE
):
current_context = Context.NORMAL
# Don't reset context on closing braces - let field names determine context
# Only collect addresses from pre section
if current_section == Section.PRE:
# Look for address patterns (with or without quotes, with or without 0x prefix)
# Matches: "address":, "0xaddress":, address:, 0xaddress:, <tag>:
addr_match = None
# Plain address pattern (40 hex chars followed by colon)
# Handle both JSON format ("address":) and YAML format (address:)
# Use word boundaries to ensure we match exactly 40 hex chars,
# not part of a longer string
match = re.search(
r'["\']?(?:0x)?(?<![a-fA-F0-9])([a-fA-F0-9]{40})(?![a-fA-F0-9])["\']?\s*:',
line,
)
if match:
addr_match = match
else:
# Tagged address pattern
match = re.search(
r"<(?:contract|eoa)(?::[^:]+)?:(?:0x)?([a-fA-F0-9]{40})>\s*:",
line,
)
if match:
addr_match = match
if addr_match:
addr = normalize_address(addr_match.group(1))
if addr not in self.do_not_tag_addresses:
self.address_mappings[addr] = None # type: ignore[assignment]
self.pre_addresses.add(addr)
current_address = addr
continue
# Check for addresses in result section only for shouldnotexist logic
if current_section == Section.RESULT and current_context != Context.CODE:
# Look for address keys in result section
match = re.search(
r'["\']?(?:0x)?(?<![a-fA-F0-9])([a-fA-F0-9]{40})(?![a-fA-F0-9])["\']?\s*:',
line,
)
if match:
current_result_address = normalize_address(match.group(1))
looking_for_shouldnotexist = True
# Check for shouldnotexist after setting current_result_address
if looking_for_shouldnotexist and current_result_address:
if "shouldnotexist:1" in stripped_no_spaces_or_quotes:
# Don't convert shouldnotexist addresses if enabled for this filler
if not self.dont_convert_shouldnotexist:
# Only now add to address_mappings and mark as creation address
self.address_mappings[current_result_address] = None # type: ignore[assignment]
self.creation_addresses.add(current_result_address)
looking_for_shouldnotexist = False
current_result_address = None
elif stripped and stripped[-1] in {"}", "]"}:
# End of address block without finding shouldnotexist -leave address hard-coded
looking_for_shouldnotexist = False
current_result_address = None
# Check if current address (from pre) has code
if current_address and current_section == Section.PRE:
# For YAML files, check both when we see the code: line and during CODE context
if not self.is_json:
# First, check if this is the code: line itself
if "code:" in stripped_no_spaces_or_quotes:
# Extract content after the colon
if ":" in line:
code_content = line.split(":", 1)[1].strip()
# Remove quotes and check if there's actual content
code_content = code_content.strip("'\"")
# Only consider it has code if it's not empty or "0x"
if (
code_content
and code_content not in ["", "0x", "0X", "{}", "[]"]
and (code_content in ["|", ">"] or len(code_content) > 2)
):
self.addresses_with_code.add(current_address)
# Also check during CODE context for multi-line code
elif current_context == Context.CODE:
# Don't add if this line should change context back to NORMAL
if not any(
kw in stripped_no_spaces_or_quotes
for kw in {"balance:", "nonce:", "storage:"}
):
if stripped and not stripped.startswith("#") and "{" not in stripped:
# Any non-empty, non-comment, non-brace line in code section
# indicates there's code
self.addresses_with_code.add(current_address)
# For JSON files, check code lines directly
elif self.is_json and ('"code":' in line or '"code" :' in line):
# Extract content after the colon
if ":" in line:
code_content = line.split(":", 1)[1].strip()
# Remove quotes and check if there's actual content
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | true |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/conftest.py | src/conftest.py | """Local pytest configuration used on multiple framework tests."""
import os
from typing import Dict, Generator
import pytest
from ethereum_clis import BesuTransitionTool, ExecutionSpecsTransitionTool, TransitionTool
# Transition tool used by tests that just need "some" working t8n.
DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS = ExecutionSpecsTransitionTool
# Every registered transition-tool class that is actually usable on this host.
INSTALLED_TRANSITION_TOOLS = [
    transition_tool
    for transition_tool in TransitionTool.registered_tools
    if (
        transition_tool.is_installed()
        # Currently, Besu has the same `default_binary` as Geth, so we can't
        # use `is_installed`.
        and transition_tool != BesuTransitionTool
    )
]
@pytest.fixture(scope="session")
def installed_transition_tool_instances() -> Generator[
    Dict[str, TransitionTool | Exception], None, None
]:
    """Yield a started instance per installed tool, or the error raised while starting it."""
    started: Dict[str, TransitionTool | Exception] = {}
    for tool_class in INSTALLED_TRANSITION_TOOLS:
        name = tool_class.__name__
        try:
            tool = tool_class()
            tool.start_server()
        except Exception as exc:
            # Record the exception in order to provide context when failing
            # the appropriate test.
            started[name] = exc
        else:
            started[name] = tool
    yield started
    # Session teardown: stop every server that actually started.
    for maybe_tool in started.values():
        if isinstance(maybe_tool, TransitionTool):
            maybe_tool.shutdown()
@pytest.fixture(
    params=INSTALLED_TRANSITION_TOOLS,
    ids=[transition_tool_class.__name__ for transition_tool_class in INSTALLED_TRANSITION_TOOLS],
)
def installed_t8n(
    request: pytest.FixtureRequest,
    installed_transition_tool_instances: Dict[str, TransitionTool | Exception],
) -> TransitionTool:
    """
    Return an instantiated transition tool.

    Tests using this fixture will be automatically parameterized with all
    installed transition tools.
    """
    tool_class = request.param
    assert issubclass(tool_class, TransitionTool)
    name = tool_class.__name__
    assert name in installed_transition_tool_instances, f"{name} not instantiated"
    outcome = installed_transition_tool_instances[name]
    # Surface any startup failure recorded by the session-scoped fixture.
    if isinstance(outcome, Exception):
        raise Exception(f"Failed to instantiate {name}") from outcome
    return outcome
@pytest.fixture
def default_t8n(
    installed_transition_tool_instances: Dict[str, TransitionTool | Exception],
) -> TransitionTool:
    """
    Provide the default t8n instance for unit tests.

    Raises:
        Exception: If the default tool is not installed on this host, or if
            it is installed but failed to start (chaining the original error).
    """
    name = DEFAULT_TRANSITION_TOOL_FOR_UNIT_TESTS.__name__
    instance = installed_transition_tool_instances.get(name)
    if instance is None:
        # Missing from the mapping entirely: the tool was never detected as
        # installed, which is a different failure than a startup error.
        raise Exception(f"{name} is not installed")
    if isinstance(instance, Exception):
        raise Exception(f"Failed to instantiate {name}") from instance
    return instance
@pytest.fixture(scope="session")
def running_in_ci() -> bool:
    """Return whether the test session is executing in a CI environment."""
    # CI systems conventionally export the CI environment variable.
    return os.environ.get("CI") is not None
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_checklists/eip_checklist.py | src/ethereum_test_checklists/eip_checklist.py | """
EIP Testing Checklist Enum definitions.
Note: This module includes a companion .pyi stub file that provides mypy type
hints for making EIPChecklist classes callable. The stub file is auto-generated
using: uv run generate_checklist_stubs
If you modify the EIPChecklist class structure, regenerate the stub file to
maintain proper type checking support.
"""
import re
import pytest
def camel_to_snake(name: str) -> str:
    """Convert CamelCase to snake_case."""
    # Split "XWord" runs: underscore between any char and an Upper+lower group
    # (keeps acronyms together, e.g. HTTPServer -> HTTP_Server).
    partially_split = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", name)
    # Split remaining lower/digit -> Upper boundaries, then lowercase everything.
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", partially_split).lower()
class ChecklistItemMeta(type):
    """Metaclass that assigns each checklist class a slash-separated path and makes it callable as a pytest marker."""
    # Slash-separated location of this item in the checklist tree.
    _path: str = ""
    # Explicit name overriding the snake_case of the class name, if any.
    _override_name: str = ""
    def __new__(mcs, name: str, bases: tuple, namespace: dict, **kwargs):  # noqa: N804
        """Create a new class with the parent path set."""
        parent_path = kwargs.get("parent_path", "")
        override_name = kwargs.get("override_name", None)
        # Create the class
        cls = super().__new__(mcs, name, bases, namespace)
        cls._override_name = override_name
        # Path component: the override name wins over the snake_cased class name.
        item_name = override_name if override_name is not None else camel_to_snake(name)
        if parent_path:
            # Append this item's name to the parent's path.
            cls._path = f"{parent_path}/{item_name}"
        else:
            cls._path = item_name
        # Rebuild every public nested class with this class's path as its
        # parent path, and replace the original nested class attribute with
        # the rebuilt one (recursion happens via this same metaclass).
        for attr_name, attr_value in namespace.items():
            if isinstance(attr_value, type) and not attr_name.startswith("_"):
                # Create a new class with the parent path set
                assert isinstance(attr_value, ChecklistItemMeta)
                nested_cls = ChecklistItemMeta(
                    attr_value.__name__,
                    attr_value.__bases__,
                    dict(attr_value.__dict__),
                    parent_path=cls._path,
                    override_name=attr_value._override_name,
                )
                setattr(cls, attr_name, nested_cls)
        return cls
    def __str__(cls) -> str:
        """Return the path for this checklist item."""
        return cls._path
    def __repr__(cls) -> str:
        """Return a representation of this checklist item."""
        return f"<ChecklistItem: {cls._path}>"
    def __call__(cls, *args, **kwargs):
        """Return a pytest mark decorator for the checklist item."""
        # If called with a function as the first argument (direct decorator
        # usage) and no other arguments, apply the decorator to the function.
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            func = args[0]
            marker = pytest.mark.eip_checklist(cls._path)
            return marker(func)
        # Otherwise, return a pytest mark decorator (parameterized usage).
        return pytest.mark.eip_checklist(cls._path, *args, **kwargs)
class ChecklistItem(metaclass=ChecklistItemMeta):
    """
    Base class for checklist items.

    Subclasses receive a slash-separated ``_path`` from ``ChecklistItemMeta``
    and are callable as pytest mark decorators via the metaclass ``__call__``.
    """

    pass
class EIPChecklist:
"""
Main namespace for EIP testing checklist items.
This class provides a structured way to reference checklist items for EIP
testing. The class structure is automatically converted to callable pytest
markers.
Note: If you modify this class structure, regenerate the type stub file
using: uv run generate_checklist_stubs
Examples:
@EIPChecklist.Opcode.Test.GasUsage.Normal()
def test_normal_gas():
pass
@EIPChecklist.Opcode.Test.StackOverflow
def test_stack_overflow():
pass
"""
class General(ChecklistItem):
"""General checklist items."""
class CodeCoverage(ChecklistItem):
"""Code coverage checklist items."""
class Eels(ChecklistItem):
"""EELS code coverage."""
pass
class TestCoverage(ChecklistItem):
"""Test code coverage."""
pass
class SecondClient(ChecklistItem):
"""Second client code coverage."""
pass
class Opcode(ChecklistItem):
"""New opcode checklist items."""
class Test(ChecklistItem):
"""Test vectors for new opcode."""
class MemExp(ChecklistItem):
"""Memory expansion tests."""
class ZeroBytesZeroOffset(ChecklistItem):
"""Zero bytes expansion with zero-offset."""
pass
class ZeroBytesMaxOffset(ChecklistItem):
"""Zero bytes expansion with 2**256-1 offset."""
pass
class SingleByte(ChecklistItem):
"""Single byte expansion."""
pass
class ThirtyOneBytes(ChecklistItem, override_name="31_bytes"):
"""31 bytes expansion."""
pass
class ThirtyTwoBytes(ChecklistItem, override_name="32_bytes"):
"""32 bytes expansion."""
pass
class ThirtyThreeBytes(ChecklistItem, override_name="33_bytes"):
"""33 bytes expansion."""
pass
class SixtyFourBytes(ChecklistItem, override_name="64_bytes"):
"""64 bytes expansion."""
pass
class TwoThirtyTwoMinusOneBytes(
ChecklistItem, override_name="2_32_minus_one_bytes"
):
"""2**32-1 bytes expansion."""
pass
class TwoThirtyTwoBytes(ChecklistItem, override_name="2_32_bytes"):
"""2**32 bytes expansion."""
pass
class TwoSixtyFourMinusOneBytes(
ChecklistItem, override_name="2_64_minus_one_bytes"
):
"""2**64-1 bytes expansion."""
pass
class TwoSixtyFourBytes(ChecklistItem, override_name="2_64_bytes"):
"""2**64 bytes expansion."""
pass
class TwoTwoFiftySixMinusOneBytes(
ChecklistItem, override_name="2_256_minus_one_bytes"
):
"""2**256-1 bytes expansion."""
pass
class StackOverflow(ChecklistItem):
"""Stack overflow test."""
pass
class StackUnderflow(ChecklistItem):
"""Stack underflow test."""
pass
class StackComplexOperations(ChecklistItem):
"""Stack complex operations tests."""
class StackHeights(ChecklistItem):
"""Stack height tests."""
class Zero(ChecklistItem):
"""Operation on an empty stack."""
pass
class Odd(ChecklistItem):
"""Operation on a stack with odd height."""
pass
class Even(ChecklistItem):
"""Operation on a stack with even height."""
pass
class DataPortionVariables(ChecklistItem, override_name="data_portion_variables"):
"""
If the opcode contains variables in its data portion, for
each variable `n` of the opcode that accesses the nth stack
item, test `n` being.
"""
class Top(ChecklistItem):
"""`n` is the top stack item."""
pass
class Bottom(ChecklistItem):
"""`n` is the bottom stack item."""
pass
class Middle(ChecklistItem):
"""`n` is the middle stack item."""
pass
class ExecutionContext(ChecklistItem):
"""Execution context tests."""
class Call(ChecklistItem):
"""CALL context."""
pass
class Staticcall(ChecklistItem):
"""STATICCALL context tests."""
class BanCheck(ChecklistItem):
"""Ban check for state modifications."""
pass
class BanNoModification(ChecklistItem):
"""Ban even without modifications."""
pass
class SubCalls(ChecklistItem):
"""Sub-calls verification."""
pass
class Delegatecall(ChecklistItem):
"""DELEGATECALL context."""
pass
class Storage(ChecklistItem):
"""DELEGATECALL storage modification."""
pass
class Balance(ChecklistItem):
"""DELEGATECALL balance modification."""
pass
class Code(ChecklistItem):
"""DELEGATECALL code modification."""
pass
class Callcode(ChecklistItem):
"""CALLCODE context."""
pass
class Initcode(ChecklistItem):
"""Initcode execution tests."""
class Behavior(ChecklistItem):
"""Initcode behavior."""
pass
class Tx(ChecklistItem):
"""Initcode from transaction."""
pass
class Opcode(ChecklistItem):
"""Initcode from opcode."""
pass
class Reentry(ChecklistItem):
"""Initcode re-entry."""
pass
class SetCode(ChecklistItem):
"""Set-code delegated account."""
pass
class TxContext(ChecklistItem):
"""Transaction context dependent."""
pass
class BlockContext(ChecklistItem):
"""Block context dependent."""
pass
class ReturnData(ChecklistItem):
"""Return data tests."""
class Buffer(ChecklistItem):
"""Return buffer tests."""
class Current(ChecklistItem):
"""Return buffer at current call context."""
pass
class Parent(ChecklistItem):
"""Return buffer at parent call context."""
pass
class GasUsage(ChecklistItem):
"""Gas usage tests."""
class Normal(ChecklistItem):
"""Normal operation gas usage."""
pass
class MemoryExpansion(ChecklistItem):
"""Memory expansion gas usage."""
pass
class OutOfGasExecution(ChecklistItem):
"""Out-of-gas due to opcode inputs."""
pass
class OutOfGasMemory(ChecklistItem):
"""Out-of-gas due to memory expansion."""
pass
class ExtraGas(ChecklistItem):
"""Extra gas usage tests."""
pass
class OrderOfOperations(ChecklistItem):
"""Order of operations tests."""
class Exact(ChecklistItem):
"""Exact gas required."""
pass
class Oog(ChecklistItem):
"""Out-of-gas with 1 gas difference."""
pass
class Terminating(ChecklistItem):
"""Terminating opcode tests."""
class Scenarios(ChecklistItem):
"""Termination scenarios."""
class TopLevel(ChecklistItem):
"""Top-level call termination."""
pass
class SubLevel(ChecklistItem):
"""Sub-level call termination."""
pass
class Initcode(ChecklistItem):
"""Initcode termination."""
pass
class Rollback(ChecklistItem):
"""Rollback tests."""
class Balance(ChecklistItem):
"""Balance changes rollback."""
pass
class Storage(ChecklistItem):
"""Storage changes rollback."""
pass
class Contracts(ChecklistItem):
"""Contract creations rollback."""
pass
class Nonce(ChecklistItem):
"""Nonce increments rollback."""
pass
class Logs(ChecklistItem):
"""Log events rollback."""
pass
class OutOfBounds(ChecklistItem):
"""Out-of-bounds checks."""
class Verify(ChecklistItem):
"""Verification tests."""
class Max(ChecklistItem):
"""Max value for each parameter."""
pass
class MaxPlusOne(ChecklistItem):
"""Max value + 1 for each parameter."""
pass
class ExceptionalAbort(ChecklistItem):
"""Exceptional abort conditions."""
pass
class DataPortion(ChecklistItem):
"""Data portion tests."""
class AllZeros(ChecklistItem):
"""All zeros data portion."""
pass
class MaxValue(ChecklistItem):
"""Max value data portion."""
pass
class Jump(ChecklistItem):
"""Jump into the data portion."""
pass
class ContractCreation(ChecklistItem):
"""Contract creation tests."""
class Address(ChecklistItem):
"""Address calculation."""
pass
class Failure(ChecklistItem):
"""Creation failure tests."""
class Oog(ChecklistItem):
"""Out-of-gas failure."""
pass
class InsufficientValue(ChecklistItem):
"""Insufficient value failure."""
pass
class Collision(ChecklistItem):
"""Address collision failure."""
pass
class Recursive(ChecklistItem):
"""Recursive contract creation."""
pass
class ForkTransition(ChecklistItem):
"""Fork transition tests."""
class Invalid(ChecklistItem):
"""Invalid before/after fork."""
pass
class At(ChecklistItem):
"""Behavior at transition block."""
pass
class Precompile(ChecklistItem):
"""New precompile checklist items."""
class Test(ChecklistItem):
"""Test vectors for new precompile."""
class CallContexts(ChecklistItem):
"""Call context tests."""
class Normal(ChecklistItem):
"""CALL context."""
pass
class Delegate(ChecklistItem):
"""DELEGATECALL context."""
pass
class Static(ChecklistItem):
"""STATICCALL context."""
pass
class Callcode(ChecklistItem):
"""CALLCODE context."""
pass
class TxEntry(ChecklistItem):
"""Transaction entry-point."""
pass
class Initcode(ChecklistItem):
"""Initcode call tests."""
class CREATE(ChecklistItem, override_name="CREATE"):
"""Call from CREATE/CREATE2 initcode."""
pass
class Tx(ChecklistItem):
"""Call from transaction initcode."""
pass
class SetCode(ChecklistItem):
"""Set-code delegated address."""
pass
class Inputs(ChecklistItem):
"""Input tests."""
class Valid(ChecklistItem):
"""Valid inputs."""
class Boundary(ChecklistItem):
"""Valid boundary values."""
pass
class Crypto(ChecklistItem):
"""Valid cryptographic inputs."""
pass
class AllZeros(ChecklistItem):
"""All zeros input."""
pass
class MaxValues(ChecklistItem):
"""Max values input."""
pass
class Invalid(ChecklistItem):
"""Invalid inputs."""
class Crypto(ChecklistItem):
"""Invalid cryptographic inputs."""
pass
class Corrupted(ChecklistItem):
"""Corrupted inputs."""
pass
class ValueTransfer(ChecklistItem):
"""Value transfer tests."""
class Fee(ChecklistItem):
"""Fee-based precompile tests."""
class Under(ChecklistItem):
"""Under required fee."""
pass
class Exact(ChecklistItem):
"""Exact required fee."""
pass
class Over(ChecklistItem):
"""Over required fee."""
pass
class NoFee(ChecklistItem):
"""No-fee precompile."""
pass
class OutOfBounds(ChecklistItem):
"""Out-of-bounds checks."""
class Max(ChecklistItem):
"""Max value for each input."""
pass
class MaxPlusOne(ChecklistItem):
"""Max value + 1 for each input."""
pass
class InputLengths(ChecklistItem):
"""Input length tests."""
class Zero(ChecklistItem):
"""Zero-length calldata."""
pass
class Static(ChecklistItem):
"""Static input length tests."""
class Correct(ChecklistItem):
"""Correct static-length calldata."""
pass
class TooShort(ChecklistItem):
"""Calldata too short."""
pass
class TooLong(ChecklistItem):
"""Calldata too long."""
pass
class Dynamic(ChecklistItem):
"""Dynamic input length tests."""
class Valid(ChecklistItem):
"""Valid dynamic lengths."""
pass
class TooShort(ChecklistItem):
"""Calldata too short."""
pass
class TooLong(ChecklistItem):
"""Calldata too long."""
pass
class GasUsage(ChecklistItem):
"""Gas usage tests."""
class Constant(ChecklistItem):
"""Constant gas cost tests."""
class Exact(ChecklistItem):
"""Exact gas consumption."""
pass
class Oog(ChecklistItem):
"""Out-of-gas error."""
pass
class Dynamic(ChecklistItem):
"""Dynamic gas cost tests."""
class Exact(ChecklistItem):
"""Exact gas consumption."""
pass
class Oog(ChecklistItem):
"""Out-of-gas error."""
pass
class ExcessiveGasUsage(ChecklistItem):
"""Excessive gas usage."""
pass
class ForkTransition(ChecklistItem):
"""Fork transition tests."""
class Before(ChecklistItem):
"""Before fork activation tests."""
class InvalidInput(ChecklistItem):
"""Invalid input call."""
pass
class ZeroGas(ChecklistItem):
"""Zero-gas call."""
pass
class Cold(ChecklistItem):
"""Cold precompile address."""
pass
class After(ChecklistItem):
"""After fork activation tests."""
class Warm(ChecklistItem):
"""Warm precompile address."""
pass
class RemovedPrecompile(ChecklistItem):
"""Removed precompile checklist items."""
class Test(ChecklistItem):
"""Test vectors for removed precompile."""
class ForkTransition(ChecklistItem):
"""Fork transition tests."""
class Operational(ChecklistItem):
"""Precompile operation on fork activation."""
pass
class Before(ChecklistItem):
"""Before fork tests."""
class Warm(ChecklistItem):
"""Warm precompile address."""
pass
class After(ChecklistItem):
"""After fork tests."""
class Cold(ChecklistItem):
"""Cold precompile address."""
pass
class SystemContract(ChecklistItem):
"""New system contract checklist items."""
class Test(ChecklistItem):
"""Test vectors for new system contract."""
class CallContexts(ChecklistItem):
"""Call context tests."""
class Normal(ChecklistItem):
"""CALL context."""
pass
class Delegate(ChecklistItem):
"""DELEGATECALL context."""
pass
class Static(ChecklistItem):
"""STATICCALL context."""
pass
class Callcode(ChecklistItem):
"""CALLCODE context."""
pass
class TxEntry(ChecklistItem):
"""Transaction entry-point."""
pass
class Initcode(ChecklistItem):
"""Initcode call tests."""
class CREATE(ChecklistItem, override_name="CREATE"):
"""Call from CREATE/CREATE2 initcode."""
pass
class Tx(ChecklistItem):
"""Call from transaction initcode."""
pass
class SetCode(ChecklistItem):
"""Set-code delegated address."""
pass
class Inputs(ChecklistItem):
"""Input tests."""
class Valid(ChecklistItem):
"""Valid inputs."""
pass
class Boundary(ChecklistItem):
"""Boundary values."""
pass
class AllZeros(ChecklistItem):
"""All zeros input."""
pass
class MaxValues(ChecklistItem):
"""Max values input."""
pass
class Invalid(ChecklistItem):
"""Invalid inputs."""
class Checks(ChecklistItem):
"""Invalid validity checks."""
pass
class Crypto(ChecklistItem):
"""Invalid cryptographic inputs."""
pass
class Corrupted(ChecklistItem):
"""Corrupted inputs."""
pass
class ValueTransfer(ChecklistItem):
"""Value transfer tests."""
class Fee(ChecklistItem):
"""Fee-based system contract tests."""
class Under(ChecklistItem):
"""Under required fee."""
pass
class Exact(ChecklistItem):
"""Exact required fee."""
pass
class Over(ChecklistItem):
"""Over required fee."""
pass
class NoFee(ChecklistItem):
"""No-fee system contract."""
pass
class OutOfBounds(ChecklistItem):
"""Out-of-bounds checks."""
class Max(ChecklistItem):
"""Max value for each input."""
pass
class MaxPlusOne(ChecklistItem):
"""Max value + 1 for each input."""
pass
class InputLengths(ChecklistItem):
"""Input length tests."""
class Zero(ChecklistItem):
"""Zero-length calldata."""
pass
class Static(ChecklistItem):
"""Static input length tests."""
class Correct(ChecklistItem):
"""Correct static-length calldata."""
pass
class TooShort(ChecklistItem):
"""Calldata too short."""
pass
class TooLong(ChecklistItem):
"""Calldata too long."""
pass
class Dynamic(ChecklistItem):
"""Dynamic input length tests."""
class Valid(ChecklistItem):
"""Valid dynamic lengths."""
pass
class TooShort(ChecklistItem):
"""Calldata too short."""
pass
class TooLong(ChecklistItem):
"""Calldata too long."""
pass
class GasUsage(ChecklistItem):
"""Gas usage tests."""
class Constant(ChecklistItem):
"""Constant gas cost tests."""
class Exact(ChecklistItem):
"""Exact gas consumption."""
pass
class Oog(ChecklistItem):
"""Out-of-gas error."""
pass
class Dynamic(ChecklistItem):
"""Dynamic gas cost tests."""
class Exact(ChecklistItem):
"""Exact gas consumption."""
pass
class Oog(ChecklistItem):
"""Out-of-gas error."""
pass
class ExcessiveGas(ChecklistItem):
"""Excessive gas tests."""
class BlockGas(ChecklistItem):
"""Exhaust block gas limit."""
pass
class SystemCall(ChecklistItem):
"""Excessive gas on system call."""
pass
class Deployment(ChecklistItem):
"""Deployment tests."""
class Missing(ChecklistItem):
"""Missing system contract."""
pass
class Address(ChecklistItem):
"""Deployment address verification."""
pass
class ContractVariations(ChecklistItem):
"""Contract variation tests."""
class Networks(ChecklistItem):
"""Different network variations."""
pass
class ContractSubstitution(ChecklistItem):
"""Contract substitution tests."""
class ReturnLengths(ChecklistItem):
"""Modified return value lengths."""
pass
class Logs(ChecklistItem):
"""Modified logs."""
pass
class RaisesException(ChecklistItem, override_name="exception"):
"""Modified to cause exception."""
pass
class GasLimitSuccess(ChecklistItem):
"""30M gas consumption success."""
pass
class GasLimitFailure(ChecklistItem):
"""30M+1 gas consumption failure."""
pass
class ForkTransition(ChecklistItem):
"""Fork transition tests."""
class CallBeforeFork(ChecklistItem):
"""Call system contract before fork."""
pass
class TransactionType(ChecklistItem):
"""New transaction type checklist items."""
class Test(ChecklistItem):
"""Test vectors for new transaction type."""
class IntrinsicValidity(ChecklistItem):
"""Intrinsic validity tests."""
class GasLimit(ChecklistItem):
"""Gas limit tests."""
class Exact(ChecklistItem):
"""Exact intrinsic gas."""
pass
class Insufficient(ChecklistItem):
"""Insufficient gas."""
pass
class MaxFee(ChecklistItem):
"""Max fee tests."""
class MaxPriorityLowerThanMaxFee(ChecklistItem):
"""Max priority < max fee."""
pass
class MaxPriorityEqualToMaxFee(ChecklistItem):
"""Max priority == max fee."""
pass
class BaseLower(ChecklistItem):
"""Max fee < base fee."""
pass
class BaseEqual(ChecklistItem):
"""Max fee == base fee."""
pass
class ChainId(ChecklistItem):
"""Chain ID validation."""
pass
class NonceMinusOne(ChecklistItem):
"""Nonce == sender.nonce - 1."""
pass
class NoncePlusOne(ChecklistItem):
"""Nonce == sender.nonce + 1."""
pass
class NonceExact(ChecklistItem):
"""Nonce == sender.nonce."""
pass
class To(ChecklistItem):
"""To address validation."""
pass
class ValueNonZeroInsufficientBalance(ChecklistItem):
"""Non-zero value with insufficient balance."""
pass
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | true |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_checklists/__init__.py | src/ethereum_test_checklists/__init__.py | """Ethereum test checklists module for EIP testing coverage tracking."""
from .eip_checklist import EIPChecklist
__all__ = ["EIPChecklist"]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_checklists/tests/test_checklist_template_consistency.py | src/ethereum_test_checklists/tests/test_checklist_template_consistency.py | """Test consistency between checklist template and EIPChecklist class."""
import re
from pathlib import Path
from typing import Any, Set
import pytest
from ethereum_test_checklists.eip_checklist import EIPChecklist
TEMPLATE_PATH = (
Path(__file__).parent.parent.parent.parent
/ "docs"
/ "writing_tests"
/ "checklist_templates"
/ "eip_testing_checklist_template.md"
)
def extract_markdown_ids(markdown_content: str) -> Set[str]:
    """Extract all checklist IDs from markdown content."""
    # IDs live between backticks inside markdown table cells; genuine
    # checklist IDs always contain a "/" separator, which filters out any
    # other back-ticked table content.
    cell_pattern = r"\|\s*`([^`]+)`\s*\|"
    return {
        candidate
        for candidate in re.findall(cell_pattern, markdown_content)
        if "/" in candidate
    }
def get_all_checklist_ids(obj: Any) -> Set[str]:
    """
    Recursively extract all checklist IDs from EIPChecklist and its children.
    """
    collected: Set[str] = set()

    for attribute_name in dir(obj):
        # Private attributes and dunder machinery are never checklist items.
        if attribute_name.startswith("_"):
            continue

        candidate = getattr(obj, attribute_name)

        # Checklist items are classes carrying a `_path` attribute.
        if not (isinstance(candidate, type) and hasattr(candidate, "_path")):
            continue

        # The metaclass renders the full path via str(); skip empty roots.
        candidate_path = str(candidate)
        if candidate_path:
            collected.add(candidate_path)

        # Descend into nested checklist classes.
        collected |= get_all_checklist_ids(candidate)

    return collected
def test_checklist_template_consistency() -> None:
    """
    Test that all IDs in markdown template match EIPChecklist class exactly.

    Fails with a detailed report of IDs present in one source but not the
    other.
    """
    # Read the markdown template
    with open(TEMPLATE_PATH, "r", encoding="utf-8") as f:
        markdown_content = f.read()

    # Extract IDs from both sources
    markdown_ids = extract_markdown_ids(markdown_content)
    checklist_ids = get_all_checklist_ids(EIPChecklist)

    # Find differences
    missing_in_checklist = markdown_ids - checklist_ids
    missing_in_markdown = checklist_ids - markdown_ids

    # Create detailed error messages
    errors = []

    if missing_in_checklist:
        errors.append(
            f"IDs found in markdown template but missing in EIPChecklist class "
            f"({len(missing_in_checklist)} items):\n"
            + "\n".join(f"  - `{id_}`" for id_ in sorted(missing_in_checklist))
        )

    if missing_in_markdown:
        for id_ in missing_in_markdown:
            # Intermediate (non-leaf) checklist IDs are tolerated: skip any ID
            # that is a strict prefix of another checklist ID.
            if any(item.startswith(id_ + "/") for item in checklist_ids):
                continue
            errors.append(f"ID `{id_}` not found in markdown template")

    if errors:
        # Prepend totals so CI output shows the scale of the mismatch.
        error_message = f"\nTotal markdown IDs: {len(markdown_ids)}\n"
        error_message += f"Total checklist IDs: {len(checklist_ids)}\n\n"
        error_message += "\n\n".join(errors)
        pytest.fail(error_message)
def test_checklist_template_exists() -> None:
    """Verify that the markdown checklist template file is present on disk."""
    template_is_present = TEMPLATE_PATH.exists()
    assert template_is_present, f"Checklist template not found at {TEMPLATE_PATH}"
def test_eip_checklist_class_structure() -> None:
    """Test that the EIPChecklist class has expected structure."""
    # The top-level checklist sections must all be present.
    for section_name in ("General", "Opcode", "Precompile"):
        assert hasattr(
            EIPChecklist, section_name
        ), f"EIPChecklist should have {section_name} class"

    # The metaclass must translate nested class names into slash-separated
    # snake_case paths.
    eels_path = str(EIPChecklist.General.CodeCoverage.Eels)
    assert eels_path == "general/code_coverage/eels"
    mem_exp_path = str(EIPChecklist.Opcode.Test.MemExp.ZeroBytesZeroOffset)
    assert mem_exp_path == "opcode/test/mem_exp/zero_bytes_zero_offset"
def test_id_extraction_functions() -> None:
    """Test that our ID extraction functions work correctly."""
    # Markdown extraction: only back-ticked table cells containing "/" count.
    sample_markdown = """
    | ID | Description | Status | Tests |
    | `test/example/id` | Test description | | |
    | `another/test/path` | Another test | | |
    """
    extracted = extract_markdown_ids(sample_markdown)
    assert "test/example/id" in extracted
    assert "another/test/path" in extracted

    # Checklist extraction: walking EIPChecklist yields a non-empty ID set
    # that contains known leaf paths.
    class_tree_ids = get_all_checklist_ids(EIPChecklist)
    assert len(class_tree_ids) > 0
    assert "general/code_coverage/eels" in class_tree_ids
def test_eip_checklist_decorator_usage() -> None:
    """
    Test EIPChecklist items work correctly as decorators both with and without
    parentheses.

    Both forms are routed through ``ChecklistItemMeta.__call__``.
    """

    # Test decorator with parentheses
    @EIPChecklist.Opcode.Test.StackComplexOperations()
    def test_function_with_parens() -> None:
        pass

    # Verify the marker was applied
    markers = list(test_function_with_parens.pytestmark)  # type: ignore[attr-defined]
    assert len(markers) >= 1
    eip_markers = [m for m in markers if m.name == "eip_checklist"]
    assert len(eip_markers) == 1
    assert eip_markers[0].args == ("opcode/test/stack_complex_operations",)

    # Test decorator without parentheses (direct usage - this is the key fix
    # for issue #1)
    @EIPChecklist.Opcode.Test.StackOverflow
    def test_function_no_parens() -> None:
        pass

    # Verify the marker was applied
    markers = list(test_function_no_parens.pytestmark)  # type: ignore[attr-defined]
    eip_markers = [m for m in markers if m.name == "eip_checklist"]
    assert len(eip_markers) == 1
    assert eip_markers[0].args == ("opcode/test/stack_overflow",)
def test_eip_checklist_pytest_param_usage() -> None:
    """Test that EIPChecklist works correctly in pytest.param marks."""
    # Test that parentheses form works in pytest.param
    param_with_parens = pytest.param(
        "test_value", marks=EIPChecklist.Opcode.Test.GasUsage.Normal(), id="gas_test"
    )

    # Verify the parameter was created successfully
    assert param_with_parens.values == ("test_value",)
    assert param_with_parens.id == "gas_test"
    assert len(param_with_parens.marks) == 1
    assert param_with_parens.marks[0].name == "eip_checklist"  # type: ignore[index]
    assert param_with_parens.marks[0].args == ("opcode/test/gas_usage/normal",)  # type: ignore[index]

    # Test that multiple marks work
    param_multiple_marks = pytest.param(
        "test_value",
        marks=[EIPChecklist.Opcode.Test.StackComplexOperations(), pytest.mark.slow],  # type: ignore[list-item]
        id="complex_test",
    )

    # Verify multiple marks
    assert len(param_multiple_marks.marks) == 2
    eip_mark = next(m for m in param_multiple_marks.marks if m.name == "eip_checklist")
    assert eip_mark.args == ("opcode/test/stack_complex_operations",)

    # Test that non-parentheses form fails gracefully with pytest.param
    # (This documents the expected behavior - parentheses are required,
    # because without () the item is still a ChecklistItemMeta class rather
    # than a pytest MarkDecorator.)
    with pytest.raises((TypeError, AssertionError)):
        pytest.param(
            "test_value",
            # Without () should fail
            marks=EIPChecklist.Opcode.Test.StackOverflow,  # type: ignore[arg-type]
            id="should_fail",
        )
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_checklists/tests/__init__.py | src/ethereum_test_checklists/tests/__init__.py | """Tests for ethereum_test_checklists."""
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_types.py | src/ethereum_test_types/block_types.py | """Block-related types for Ethereum tests."""
import hashlib
from dataclasses import dataclass
from functools import cached_property
from typing import Any, Dict, Generic, List, Sequence
import ethereum_rlp as eth_rlp
from ethereum_types.numeric import Uint
from pydantic import Field, computed_field
from trie import HexaryTrie
from ethereum_test_base_types import (
Address,
Bytes,
CamelModel,
EmptyOmmersRoot,
Hash,
HexNumber,
NumberBoundTypeVar,
ZeroPaddedHexNumber,
)
from ethereum_test_forks import Fork
DEFAULT_BASE_FEE = 7
CURRENT_MAINNET_BLOCK_GAS_LIMIT = 45_000_000
DEFAULT_BLOCK_GAS_LIMIT = CURRENT_MAINNET_BLOCK_GAS_LIMIT * 2
@dataclass
class EnvironmentDefaults:
    """Default environment values."""

    # By default, the constant `DEFAULT_BLOCK_GAS_LIMIT` is used.
    # Other libraries (pytest plugins) may override this value by modifying the
    # `EnvironmentDefaults.gas_limit` class attribute.
    # `EnvironmentGeneric.gas_limit` reads this lazily through a
    # `default_factory`, so overrides take effect for environments created
    # afterwards.
    gas_limit: int = DEFAULT_BLOCK_GAS_LIMIT
class WithdrawalGeneric(CamelModel, Generic[NumberBoundTypeVar]):
    """
    Withdrawal generic type, used as a parent class for `Withdrawal` and
    `FixtureWithdrawal`.
    """

    index: NumberBoundTypeVar
    validator_index: NumberBoundTypeVar
    address: Address
    amount: NumberBoundTypeVar

    def to_serializable_list(self) -> List[Any]:
        """
        Return list of the withdrawal's attributes in the order they should be
        serialized.
        """
        serializable: List[Any] = [
            Uint(self.index),
            Uint(self.validator_index),
            self.address,
            Uint(self.amount),
        ]
        return serializable

    @staticmethod
    def list_root(withdrawals: Sequence["WithdrawalGeneric"]) -> bytes:
        """Return withdrawals root of a list of withdrawals."""
        # Build a trie keyed by the RLP-encoded list index, valued by the
        # RLP-encoded withdrawal, and return its root hash.
        trie = HexaryTrie(db={})
        for position, withdrawal in enumerate(withdrawals):
            encoded_key = eth_rlp.encode(Uint(position))
            encoded_value = eth_rlp.encode(withdrawal.to_serializable_list())
            trie.set(encoded_key, encoded_value)
        return trie.root_hash
class Withdrawal(WithdrawalGeneric[HexNumber]):
    """Withdrawal type with `HexNumber` numeric fields."""

    pass
class EnvironmentGeneric(CamelModel, Generic[NumberBoundTypeVar]):
    """Used as a parent class for `Environment` and `FixtureEnvironment`."""

    # Field aliases follow the "current*"/"parent*" camelCase JSON names
    # (presumably the t8n tool's environment schema — confirm against the
    # consumer of this model).
    fee_recipient: Address = Field(
        Address("0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"),
        alias="currentCoinbase",
    )
    # Read lazily so runtime overrides of EnvironmentDefaults.gas_limit apply.
    gas_limit: NumberBoundTypeVar = Field(
        default_factory=lambda: EnvironmentDefaults.gas_limit, alias="currentGasLimit"
    )  # type: ignore
    number: NumberBoundTypeVar = Field(1, alias="currentNumber")  # type: ignore
    timestamp: NumberBoundTypeVar = Field(1_000, alias="currentTimestamp")  # type: ignore
    prev_randao: NumberBoundTypeVar | None = Field(None, alias="currentRandom")
    difficulty: NumberBoundTypeVar | None = Field(None, alias="currentDifficulty")
    base_fee_per_gas: NumberBoundTypeVar | None = Field(None, alias="currentBaseFee")
    excess_blob_gas: NumberBoundTypeVar | None = Field(None, alias="currentExcessBlobGas")

    # Parent-block fields; optional, used to derive the current values when
    # the "current*" fields are unset.
    parent_difficulty: NumberBoundTypeVar | None = Field(None)
    parent_timestamp: NumberBoundTypeVar | None = Field(None)
    parent_base_fee_per_gas: NumberBoundTypeVar | None = Field(None, alias="parentBaseFee")
    parent_gas_used: NumberBoundTypeVar | None = Field(None)
    parent_gas_limit: NumberBoundTypeVar | None = Field(None)
class Environment(EnvironmentGeneric[ZeroPaddedHexNumber]):
    """
    Structure used to keep track of the context in which a block must be
    executed.
    """

    # Blob-gas fields; left as None when the fork does not require them.
    blob_gas_used: ZeroPaddedHexNumber | None = Field(None, alias="currentBlobGasUsed")
    parent_ommers_hash: Hash = Field(Hash(EmptyOmmersRoot), alias="parentUncleHash")
    parent_blob_gas_used: ZeroPaddedHexNumber | None = Field(None)
    parent_excess_blob_gas: ZeroPaddedHexNumber | None = Field(None)
    parent_beacon_block_root: Hash | None = Field(None)
    # Known ancestor block hashes keyed by block number; `parent_hash` is
    # derived from the highest-numbered entry.
    block_hashes: Dict[ZeroPaddedHexNumber, Hash] = Field(default_factory=dict)
    ommers: List[Hash] = Field(default_factory=list)
    withdrawals: List[Withdrawal] | None = Field(None)
    # Excluded from model serialization; compared explicitly in __eq__ below.
    extra_data: Bytes = Field(Bytes(b"\x00"), exclude=True)

    # EIP-7928: Block-level access lists
    bal_hash: Hash | None = Field(None)
    block_access_lists: Bytes | None = Field(None)

    @computed_field  # type: ignore[prop-decorator]
    @cached_property
    def parent_hash(self) -> Hash | None:
        """
        Obtains the latest hash according to the highest block number in
        `block_hashes`.

        Returns None when no block hashes are known.
        """
        if len(self.block_hashes) == 0:
            return None

        last_index = max(self.block_hashes.keys())
        return Hash(self.block_hashes[last_index])

    def set_fork_requirements(self, fork: Fork) -> "Environment":
        """
        Fill required fields in an environment depending on the fork.

        Returns a copy of ``self`` with defaults applied for every header
        field the fork requires but that is unset both here and on the
        corresponding parent field.
        """
        number = self.number
        timestamp = self.timestamp

        updated_values: Dict[str, Any] = {}

        if (
            fork.header_prev_randao_required(block_number=number, timestamp=timestamp)
            and self.prev_randao is None
        ):
            updated_values["prev_randao"] = 0

        if (
            fork.header_withdrawals_required(block_number=number, timestamp=timestamp)
            and self.withdrawals is None
        ):
            updated_values["withdrawals"] = []

        if (
            fork.header_base_fee_required(block_number=number, timestamp=timestamp)
            and self.base_fee_per_gas is None
            and self.parent_base_fee_per_gas is None
        ):
            updated_values["base_fee_per_gas"] = DEFAULT_BASE_FEE

        if fork.header_zero_difficulty_required(block_number=number, timestamp=timestamp):
            updated_values["difficulty"] = 0
        elif self.difficulty is None and self.parent_difficulty is None:
            # Default difficulty when neither this environment nor the parent
            # specifies one.
            updated_values["difficulty"] = 0x20000

        if (
            fork.header_excess_blob_gas_required(block_number=number, timestamp=timestamp)
            and self.excess_blob_gas is None
            and self.parent_excess_blob_gas is None
        ):
            updated_values["excess_blob_gas"] = 0

        if (
            fork.header_blob_gas_used_required(block_number=number, timestamp=timestamp)
            and self.blob_gas_used is None
            and self.parent_blob_gas_used is None
        ):
            updated_values["blob_gas_used"] = 0

        if (
            fork.header_beacon_root_required(block_number=number, timestamp=timestamp)
            and self.parent_beacon_block_root is None
        ):
            updated_values["parent_beacon_block_root"] = 0

        # NOTE(review): relies on `copy` accepting updated fields as keyword
        # arguments; plain pydantic `copy` takes `update=` — confirm CamelModel
        # provides this signature.
        return self.copy(**updated_values)

    def __hash__(self) -> int:
        """
        Hashes the environment object.

        Folds a SHA-256 digest of the sorted serialized fields into an int.
        `extra_data` carries `exclude=True` and so is not part of the digest;
        objects differing only in `extra_data` may share a hash, which is
        consistent with (though weaker than) `__eq__`.
        """
        hash_dict = self.model_dump(exclude_none=True, by_alias=True)
        # Sort for a deterministic ordering regardless of field insertion.
        sorted_items = sorted(hash_dict.items())
        hash_string = str(sorted_items)
        digest = hashlib.sha256(hash_string.encode("utf-8")).digest()
        return int.from_bytes(digest[:8], byteorder="big")

    def __eq__(self, other: object) -> bool:
        """Check if two environment objects are equal."""
        if not isinstance(other, Environment):
            return False
        # `extra_data` is excluded from serialization, so it must be compared
        # explicitly on top of the dumped fields.
        self_dict = self.model_dump(exclude_none=True, by_alias=True)
        self_dict["extra_data"] = self.extra_data.hex()
        other_dict = other.model_dump(exclude_none=True, by_alias=True)
        other_dict["extra_data"] = other.extra_data.hex()
        return self_dict == other_dict
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/account_types.py | src/ethereum_test_types/account_types.py | """Account-related types for Ethereum tests."""
import json
from dataclasses import dataclass, field
from enum import Enum, auto
from typing import Any, Dict, ItemsView, Iterator, List, Literal, Optional, Self, Tuple
from coincurve.keys import PrivateKey
from ethereum_types.bytes import Bytes20
from ethereum_types.numeric import U256, Bytes32, Uint
from pydantic import PrivateAttr
from ethereum_test_base_types import (
Account,
Address,
Hash,
Number,
Storage,
StorageRootType,
)
from ethereum_test_base_types import Alloc as BaseAlloc
from ethereum_test_base_types.conversions import (
BytesConvertible,
FixedSizeBytesConvertible,
NumberConvertible,
)
from ethereum_test_vm import EVMCodeType
from .trie import EMPTY_TRIE_ROOT, FrontierAccount, Trie, root, trie_get, trie_set
from .utils import keccak256
# Type alias: addresses in the Frontier-style account model below are plain
# 20-byte values.
FrontierAddress = Bytes20
@dataclass
class State:
    """Contains all information that is preserved between transactions."""

    # Secured (hashed-key) account trie; a `None` value means "no account".
    _main_trie: Trie[Bytes20, Optional[FrontierAccount]] = field(
        default_factory=lambda: Trie(secured=True, default=None)
    )
    # Per-account storage tries, keyed by account address.
    _storage_tries: Dict[Bytes20, Trie[Bytes32, U256]] = field(default_factory=dict)
    # Snapshot stack of (main trie, storage tries); must be empty when roots
    # are computed (see `storage_root` / `state_root` below).
    _snapshots: List[
        Tuple[
            Trie[Bytes20, Optional[FrontierAccount]],
            Dict[Bytes20, Trie[Bytes32, U256]],
        ]
    ] = field(default_factory=list)
def set_account(state: State, address: Bytes20, account: Optional[FrontierAccount]) -> None:
    """
    Set the `Account` object at an address. Setting to `None` deletes the
    account (but not its storage, see `destroy_account()`).
    """
    # Delegates straight to the main account trie; `None` acts as a tombstone.
    trie_set(state._main_trie, address, account)
def set_storage(state: State, address: Bytes20, key: Bytes32, value: U256) -> None:
    """
    Set a value at a storage key on an account. Setting to `U256(0)` deletes
    the key.
    """
    # The account itself must exist before storage can be written.
    assert trie_get(state._main_trie, address) is not None

    storage_trie = state._storage_tries.get(address)
    if storage_trie is None:
        # Lazily create the per-account storage trie on first write.
        storage_trie = Trie(secured=True, default=U256(0))
        state._storage_tries[address] = storage_trie
    trie_set(storage_trie, key, value)
    # Writing zero removes the key; drop the trie entirely once it is empty.
    if storage_trie._data == {}:
        del state._storage_tries[address]
def storage_root(state: State, address: Bytes20) -> Bytes32:
    """Calculate the storage root of an account."""
    assert not state._snapshots
    # Accounts without a storage trie have the canonical empty-trie root.
    storage_trie = state._storage_tries.get(address)
    return root(storage_trie) if storage_trie is not None else EMPTY_TRIE_ROOT
def state_root(state: State) -> Bytes32:
    """Calculate the state root."""
    assert not state._snapshots
    # Each account's storage root is resolved on demand while folding the
    # main trie into the state root.
    return root(
        state._main_trie,
        get_storage_root=lambda address: storage_root(state, address),
    )
class EOA(Address):
    """
    An Externally Owned Account (EOA) is an account controlled by a private
    key.

    The EOA is defined by its address and (optionally) by its corresponding
    private key.
    """

    # Private key controlling the account, if known.
    key: Hash | None
    # Locally tracked nonce; incremented via `get_nonce()`.
    nonce: Number

    def __new__(
        cls,
        address: "FixedSizeBytesConvertible | Address | EOA | None" = None,
        *,
        key: FixedSizeBytesConvertible | None = None,
        nonce: NumberConvertible = 0,
    ) -> "EOA":
        """Init the EOA."""
        if address is None:
            if key is None:
                raise ValueError("impossible to initialize EOA without address")
            # Derive the address from the key: last 20 bytes of the keccak256
            # of the uncompressed public key (dropping the leading format byte).
            private_key = PrivateKey(Hash(key))
            public_key = private_key.public_key
            address = Address(keccak256(public_key.format(compressed=False)[1:])[32 - 20 :])
        elif isinstance(address, EOA):
            # Passing an EOA returns it unchanged (no copy; see `copy()`).
            return address
        instance = super(EOA, cls).__new__(cls, address)
        instance.key = Hash(key) if key is not None else None
        instance.nonce = Number(nonce)
        return instance

    def get_nonce(self) -> Number:
        """Return current nonce of the EOA and increments it by one."""
        nonce = self.nonce
        self.nonce = Number(nonce + 1)
        return nonce

    def copy(self) -> Self:
        """Return copy of the EOA."""
        # Wrapping in Address first avoids the EOA pass-through branch of
        # `__new__`, forcing a genuinely new instance.
        return self.__class__(Address(self), key=self.key, nonce=self.nonce)
class Alloc(BaseAlloc):
    """Allocation of accounts in the state, pre and post test execution."""

    # Default funding amount (in wei) used by subclasses implementing
    # `fund_eoa`.
    _eoa_fund_amount_default: int = PrivateAttr(10**21)

    @dataclass(kw_only=True)
    class UnexpectedAccountError(Exception):
        """Unexpected account found in the allocation."""

        address: Address
        account: Account | None

        def __str__(self) -> str:
            """Print exception string."""
            return f"unexpected account in allocation {self.address}: {self.account}"

    @dataclass(kw_only=True)
    class MissingAccountError(Exception):
        """Expected account not found in the allocation."""

        address: Address

        def __str__(self) -> str:
            """Print exception string."""
            return f"Account missing from allocation {self.address}"

    @dataclass(kw_only=True)
    class CollisionError(Exception):
        """Different accounts at the same address."""

        address: Address
        account_1: Account | None
        account_2: Account | None

        def to_json(self) -> Dict[str, Any]:
            """Dump to json object."""
            return {
                "address": self.address.hex(),
                "account_1": self.account_1.model_dump(mode="json")
                if self.account_1 is not None
                else None,
                "account_2": self.account_2.model_dump(mode="json")
                if self.account_2 is not None
                else None,
            }

        @classmethod
        def from_json(cls, obj: Dict[str, Any]) -> Self:
            """Parse from a json dict."""
            return cls(
                address=Address(obj["address"]),
                account_1=Account.model_validate(obj["account_1"])
                if obj["account_1"] is not None
                else None,
                account_2=Account.model_validate(obj["account_2"])
                if obj["account_2"] is not None
                else None,
            )

        def __str__(self) -> str:
            """Print exception string."""
            return (
                "Overlapping key defining different accounts detected:\n"
                f"{json.dumps(self.to_json(), indent=2)}"
            )

    class KeyCollisionMode(Enum):
        """Mode for handling key collisions when merging allocations."""

        ERROR = auto()  # any overlapping address raises
        OVERWRITE = auto()  # alloc_2 entries win (via Account.merge)
        ALLOW_IDENTICAL_ACCOUNTS = auto()  # overlap allowed only if accounts match

    @classmethod
    def merge(
        cls,
        alloc_1: "Alloc",
        alloc_2: "Alloc",
        key_collision_mode: KeyCollisionMode = KeyCollisionMode.OVERWRITE,
    ) -> "Alloc":
        """Return merged allocation of two sources."""
        overlapping_keys = alloc_1.root.keys() & alloc_2.root.keys()
        if overlapping_keys:
            if key_collision_mode == cls.KeyCollisionMode.ERROR:
                raise Exception(
                    f"Overlapping keys detected: {[key.hex() for key in overlapping_keys]}"
                )
            elif key_collision_mode == cls.KeyCollisionMode.ALLOW_IDENTICAL_ACCOUNTS:
                # The overlapping keys must point to the exact same account
                for key in overlapping_keys:
                    account_1 = alloc_1[key]
                    account_2 = alloc_2[key]
                    if account_1 != account_2:
                        raise Alloc.CollisionError(
                            address=key,
                            account_1=account_1,
                            account_2=account_2,
                        )
        # NOTE(review): assumes model_dump() of the root model yields an
        # address-keyed mapping whose keys are comparable with alloc_2's
        # addresses — TODO confirm serialization mode.
        merged = alloc_1.model_dump()
        for address, other_account in alloc_2.root.items():
            merged_account = Account.merge(merged.get(address, None), other_account)
            if merged_account:
                merged[address] = merged_account
            elif address in merged:
                # Merge resolved to an empty/None account: remove the entry.
                merged.pop(address, None)
        return Alloc(merged)

    def __iter__(self) -> Iterator[Address]:  # type: ignore [override]
        """Return iterator over the allocation."""
        return iter(self.root)

    def items(self) -> ItemsView[Address, Account | None]:
        """Return iterator over the allocation items."""
        return self.root.items()

    def __getitem__(self, address: Address | FixedSizeBytesConvertible) -> Account | None:
        """Return account associated with an address."""
        if not isinstance(address, Address):
            address = Address(address)
        return self.root[address]

    def __setitem__(
        self, address: Address | FixedSizeBytesConvertible, account: Account | None
    ) -> None:
        """Set account associated with an address."""
        if not isinstance(address, Address):
            address = Address(address)
        self.root[address] = account

    def __delitem__(self, address: Address | FixedSizeBytesConvertible) -> None:
        """Delete account associated with an address."""
        if not isinstance(address, Address):
            address = Address(address)
        # pop with default: deleting a missing address is a no-op.
        self.root.pop(address, None)

    def __eq__(self, other: object) -> bool:
        """Return True if both allocations are equal."""
        if not isinstance(other, Alloc):
            return False
        return self.root == other.root

    def __contains__(self, address: Address | FixedSizeBytesConvertible) -> bool:
        """Check if an account is in the allocation."""
        if not isinstance(address, Address):
            address = Address(address)
        return address in self.root

    def empty_accounts(self) -> List[Address]:
        """Return list of addresses of empty accounts."""
        return [address for address, account in self.root.items() if not account]

    def state_root(self) -> Hash:
        """Return state root of the allocation."""
        # Builds a Frontier-style secured trie from the allocation and folds
        # it into a root via the module-level `state_root` helper.
        state = State()
        for address, account in self.root.items():
            if account is None:
                continue
            set_account(
                state=state,
                address=FrontierAddress(address),
                account=FrontierAccount(
                    nonce=Uint(account.nonce) if account.nonce is not None else Uint(0),
                    balance=(U256(account.balance) if account.balance is not None else U256(0)),
                    code=account.code if account.code is not None else b"",
                ),
            )
            if account.storage is not None:
                for key, value in account.storage.root.items():
                    set_storage(
                        state=state,
                        address=FrontierAddress(address),
                        key=Bytes32(Hash(key)),
                        value=U256(value),
                    )
        return Hash(state_root(state))

    def verify_post_alloc(self, got_alloc: "Alloc") -> None:
        """
        Verify that the allocation matches the expected post in the test.

        Raises exception on unexpected values.
        """
        assert isinstance(got_alloc, Alloc), f"got_alloc is not an Alloc: {got_alloc}"
        for address, account in self.root.items():
            if account is None:
                # Account must not exist
                if address in got_alloc.root and got_alloc.root[address] is not None:
                    raise Alloc.UnexpectedAccountError(
                        address=address, account=got_alloc.root[address]
                    )
            else:
                if address in got_alloc.root:
                    got_account = got_alloc.root[address]
                    assert isinstance(got_account, Account)
                    assert isinstance(account, Account)
                    account.check_alloc(address, got_account)
                else:
                    raise Alloc.MissingAccountError(address=address)

    def deploy_contract(
        self,
        code: BytesConvertible,
        *,
        storage: Storage | StorageRootType | None = None,
        balance: NumberConvertible = 0,
        nonce: NumberConvertible = 1,
        address: Address | None = None,
        evm_code_type: EVMCodeType | None = None,
        label: str | None = None,
        stub: str | None = None,
    ) -> Address:
        """Deploy a contract to the allocation."""
        raise NotImplementedError("deploy_contract is not implemented in the base class")

    def fund_eoa(
        self,
        amount: NumberConvertible | None = None,
        label: str | None = None,
        storage: Storage | None = None,
        delegation: Address | Literal["Self"] | None = None,
        nonce: NumberConvertible | None = None,
    ) -> EOA:
        """
        Add a previously unused EOA to the pre-alloc with the balance specified
        by `amount`.
        """
        raise NotImplementedError("fund_eoa is not implemented in the base class")

    def fund_address(self, address: Address, amount: NumberConvertible) -> None:
        """
        Fund an address with a given amount.

        If the address is already present in the pre-alloc the amount will be
        added to its existing balance.
        """
        raise NotImplementedError("fund_address is not implemented in the base class")

    def empty_account(self) -> Address:
        """
        Return a previously unused account guaranteed to be empty.

        This ensures the account has zero balance, zero nonce, no code, and no
        storage. The account is not a precompile or a system contract.
        """
        raise NotImplementedError("empty_account is not implemented in the base class")
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/chain_config_types.py | src/ethereum_test_types/chain_config_types.py | """Chain Configuration related types for Ethereum tests."""
from pydantic import Field
from ethereum_test_base_types import CamelModel
class ChainConfigDefaults:
    """
    Default values for the chain configuration.

    Can be modified by modules that import this module and want to override the
    default values.
    """

    # Default chain id applied unless an importing module overrides it.
    chain_id: int = 1
class ChainConfig(CamelModel):
    """Chain configuration."""

    # default_factory reads ChainConfigDefaults lazily, so overrides applied
    # by importing modules take effect; validate_default runs validation on
    # the default as if it were an explicit value.
    chain_id: int = Field(
        default_factory=lambda: ChainConfigDefaults.chain_id, validate_default=True
    )
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/transaction_types.py | src/ethereum_test_types/transaction_types.py | """Transaction-related types for Ethereum tests."""
from dataclasses import dataclass
from enum import IntEnum
from functools import cached_property
from typing import Any, ClassVar, Dict, Generic, List, Literal, Sequence
import ethereum_rlp as eth_rlp
from coincurve.keys import PrivateKey, PublicKey
from ethereum_types.numeric import Uint
from pydantic import (
AliasChoices,
BaseModel,
ConfigDict,
Field,
computed_field,
model_serializer,
model_validator,
)
from trie import HexaryTrie
from ethereum_test_base_types import (
AccessList,
Address,
Bytes,
CamelModel,
Hash,
HexNumber,
NumberBoundTypeVar,
RLPSerializable,
SignableRLPSerializable,
TestAddress,
TestPrivateKey,
)
from ethereum_test_exceptions import TransactionException
from pytest_plugins.custom_logging import get_logger
from .account_types import EOA
from .blob_types import Blob
from .chain_config_types import ChainConfigDefaults
from .phase_manager import TestPhase, TestPhaseManager
from .receipt_types import TransactionReceipt
from .utils import int_to_bytes, keccak256
# Module-level logger namespaced to this module.
logger = get_logger(__name__)
class TransactionType(IntEnum):
    """Transaction types."""

    LEGACY = 0  # pre-typed legacy transaction
    ACCESS_LIST = 1  # EIP-2930
    BASE_FEE = 2  # EIP-1559
    BLOB_TRANSACTION = 3  # EIP-4844
    SET_CODE = 4  # EIP-7702
@dataclass
class TransactionDefaults:
    """Default values for transactions."""

    # All three defaults are annotated so they are consistent dataclass
    # fields. Previously `gas_price` and `max_fee_per_gas` lacked annotations,
    # making them plain class attributes while `max_priority_fee_per_gas` was
    # a field; class-attribute access (the usage elsewhere in this module)
    # is unchanged by this fix.
    gas_price: int = 10
    max_fee_per_gas: int = 7
    max_priority_fee_per_gas: int = 0
class AuthorizationTupleGeneric(CamelModel, Generic[NumberBoundTypeVar], SignableRLPSerializable):
    """Authorization tuple for transactions."""

    chain_id: NumberBoundTypeVar = Field(0)  # type: ignore
    address: Address
    nonce: NumberBoundTypeVar = Field(0)  # type: ignore

    # Signature components; `v` also accepts the "yParity" JSON alias.
    v: NumberBoundTypeVar = Field(default=0, validation_alias=AliasChoices("v", "yParity"))  # type: ignore
    r: NumberBoundTypeVar = Field(0)  # type: ignore
    s: NumberBoundTypeVar = Field(0)  # type: ignore

    # Single-byte domain-separation prefix prepended to the signing payload.
    magic: ClassVar[int] = 0x05

    rlp_fields: ClassVar[List[str]] = ["chain_id", "address", "nonce", "v", "r", "s"]
    rlp_signing_fields: ClassVar[List[str]] = ["chain_id", "address", "nonce"]

    def get_rlp_signing_prefix(self) -> bytes:
        """
        Return a prefix that has to be appended to the serialized signing
        object.

        By default, an empty string is returned.
        """
        return self.magic.to_bytes(1, byteorder="big")

    @model_serializer(mode="wrap", when_used="json-unless-none")
    def duplicate_v_as_y_parity(self, serializer: Any) -> Any:
        """
        Add a duplicate 'yParity' field (same as `v`) in JSON fixtures.

        Background: https://github.com/erigontech/erigon/issues/14073
        """
        data = serializer(self)
        if "v" in data and data["v"] is not None:
            data["yParity"] = data["v"]
        return data
class AuthorizationTuple(AuthorizationTupleGeneric[HexNumber]):
    """Authorization tuple for transactions."""

    # Optional account that signs (or is recovered as the signer of) this tuple.
    signer: EOA | None = None
    # Optional explicit private key; takes precedence over `signer.key`.
    secret_key: Hash | None = None

    def model_post_init(self, __context: Any) -> None:
        """
        Automatically signs the authorization tuple if a secret key or sender
        are provided.
        """
        super().model_post_init(__context)
        self.sign()

    def sign(self: "AuthorizationTuple") -> None:
        """Signs the authorization tuple with a private key."""
        signature_bytes: bytes | None = None
        rlp_signing_bytes = self.rlp_signing_bytes()
        # Only sign if no signature component was explicitly provided.
        if (
            "v" not in self.model_fields_set
            and "r" not in self.model_fields_set
            and "s" not in self.model_fields_set
        ):
            signing_key: Hash | None = None
            if self.secret_key is not None:
                signing_key = self.secret_key
            elif self.signer is not None:
                eoa = self.signer
                assert eoa is not None, "signer must be set"
                signing_key = eoa.key
            assert signing_key is not None, "secret_key or signer must be set"

            # Recoverable signature layout: r (32) | s (32) | recovery id (1).
            signature_bytes = PrivateKey(secret=signing_key).sign_recoverable(
                rlp_signing_bytes, hasher=keccak256
            )
            self.v, self.r, self.s = (
                HexNumber(signature_bytes[64]),
                HexNumber(int.from_bytes(signature_bytes[0:32], byteorder="big")),
                HexNumber(int.from_bytes(signature_bytes[32:64], byteorder="big")),
            )
            # Mark components as explicitly set so re-signing is skipped.
            self.model_fields_set.add("v")
            self.model_fields_set.add("r")
            self.model_fields_set.add("s")

        if self.signer is None:
            # Best-effort signer recovery from the signature components.
            try:
                if not signature_bytes:
                    signature_bytes = (
                        int(self.r).to_bytes(32, byteorder="big")
                        + int(self.s).to_bytes(32, byteorder="big")
                        + bytes([self.v])
                    )
                public_key = PublicKey.from_signature_and_message(
                    signature_bytes, rlp_signing_bytes.keccak256(), hasher=None
                )
                self.signer = EOA(
                    address=Address(keccak256(public_key.format(compressed=False)[1:])[32 - 20 :])
                )
            except Exception:
                # Signer remains `None` in this case
                pass
class TransactionGeneric(BaseModel, Generic[NumberBoundTypeVar]):
    """
    Generic transaction type used as a parent for Transaction and
    FixtureTransaction (blockchain).
    """

    ty: NumberBoundTypeVar = Field(0, alias="type")  # type: ignore
    # Chain id default is read lazily so ChainConfigDefaults overrides apply.
    chain_id: NumberBoundTypeVar = Field(
        default_factory=lambda: ChainConfigDefaults.chain_id, validate_default=True
    )  # type: ignore
    nonce: NumberBoundTypeVar = Field(0)  # type: ignore
    gas_price: NumberBoundTypeVar | None = None
    max_priority_fee_per_gas: NumberBoundTypeVar | None = None
    max_fee_per_gas: NumberBoundTypeVar | None = None
    gas_limit: NumberBoundTypeVar = Field(21_000)  # type: ignore
    to: Address | None = None
    value: NumberBoundTypeVar = Field(0)  # type: ignore
    data: Bytes = Field(Bytes(b""))
    access_list: List[AccessList] | None = None
    max_fee_per_blob_gas: NumberBoundTypeVar | None = None
    blob_versioned_hashes: Sequence[Hash] | None = None

    # Signature components.
    v: NumberBoundTypeVar = Field(0)  # type: ignore
    r: NumberBoundTypeVar = Field(0)  # type: ignore
    s: NumberBoundTypeVar = Field(0)  # type: ignore
    sender: EOA | None = None

    def metadata_string(self) -> str | None:
        """Return the metadata field as a formatted json string or None."""
        # Base implementation carries no metadata; subclasses may override.
        return None
class TransactionValidateToAsEmptyString(CamelModel):
    """Handler to validate the `to` field from an empty string."""

    @model_validator(mode="before")
    @classmethod
    def validate_to_as_empty_string(cls, data: Any) -> Any:
        """If the `to` field is an empty string, set the model value to None."""
        # Only the exact empty string maps to None; any other value (including
        # a missing key) passes through untouched.
        if isinstance(data, dict) and data.get("to") == "":
            data["to"] = None
        return data
class TransactionFixtureConverter(TransactionValidateToAsEmptyString):
    """
    Handler for serializing and validating the `to` field as an empty string.
    """

    @model_serializer(mode="wrap", when_used="json-unless-none")
    def serialize_to_as_empty_string(self, serializer: Any) -> Any:
        """
        Serialize the `to` field as the empty string if the model value is
        None.
        """
        serialized = serializer(self)
        # A missing `to` key (None was excluded) is materialized as "".
        if serialized is not None and "to" not in serialized:
            serialized["to"] = ""
        return serialized
class TransactionTransitionToolConverter(TransactionValidateToAsEmptyString):
    """
    Handler for serializing and validating the `to` field as an empty string.
    """

    @model_serializer(mode="wrap", when_used="json-unless-none")
    def serialize_to_as_none(self, serializer: Any) -> Any:
        """
        Serialize the `to` field as `None` if the model value is None.

        This is required as we use `exclude_none=True` when serializing, but
        the t8n tool explicitly requires a value of `None` (respectively null),
        for if the `to` field should be unset (contract creation).
        """
        serialized = serializer(self)
        # Re-materialize the excluded `to` key as an explicit null for t8n.
        if serialized is not None and "to" not in serialized:
            serialized["to"] = None
        return serialized
class TransactionTestMetadata(CamelModel):
    """Represents the metadata for a transaction."""

    test_id: str | None = None
    phase: str | None = None
    action: str | None = None  # e.g. deploy / fund / execute
    target: str | None = None  # account/contract label
    tx_index: int | None = None  # index within this phase

    def to_json(self) -> str:
        """
        Convert the transaction metadata into json string for it to be embedded
        in the request id.
        """
        # Unset fields are omitted; keys use the camelCase alias form.
        return self.model_dump_json(exclude_none=True, by_alias=True)
class Transaction(
    TransactionGeneric[HexNumber], TransactionTransitionToolConverter, SignableRLPSerializable
):
    """Generic object that can represent all Ethereum transaction types."""

    gas_limit: HexNumber = Field(HexNumber(21_000), serialization_alias="gas")
    to: Address | None = Field(Address(0xAA))
    data: Bytes = Field(Bytes(b""), alias="input")
    authorization_list: List[AuthorizationTuple] | None = None
    initcodes: List[Bytes] | None = None

    # Private key used to sign; mutually exclusive with explicit v/r/s.
    secret_key: Hash | None = None
    # Expected exception(s) for invalid transactions; excluded from dumps.
    error: List[TransactionException] | TransactionException | None = Field(None, exclude=True)

    # When True, legacy (type-0) transactions use EIP-155 replay protection.
    protected: bool = Field(True, exclude=True)
    expected_receipt: TransactionReceipt | None = Field(None, exclude=True)

    # Constant used by the EIP-155 signing envelope ("chain_id", 0, 0).
    zero: ClassVar[Literal[0]] = 0

    metadata: TransactionTestMetadata | None = Field(None, exclude=True)

    test_phase: TestPhase | None = Field(
        default_factory=TestPhaseManager.get_current_phase, exclude=True
    )

    # Re-validate on attribute assignment.
    model_config = ConfigDict(validate_assignment=True)

    class InvalidFeePaymentError(Exception):
        """Transaction described more than one fee payment type."""

        def __str__(self) -> str:
            """Print exception string."""
            return "only one type of fee payment field can be used in a single tx"

    class InvalidSignaturePrivateKeyError(Exception):
        """
        Transaction describes both the signature and private key of source
        account.
        """

        def __str__(self) -> str:
            """Print exception string."""
            return "can't define both 'signature' and 'private_key'"

    def model_post_init(self, __context: Any) -> None:
        """Ensure transaction has no conflicting properties."""
        super().model_post_init(__context)

        # gas_price (legacy/2930) cannot coexist with 1559/4844 fee fields.
        if self.gas_price is not None and (
            self.max_fee_per_gas is not None
            or self.max_priority_fee_per_gas is not None
            or self.max_fee_per_blob_gas is not None
        ):
            raise Transaction.InvalidFeePaymentError()

        if "ty" not in self.model_fields_set:
            # Try to deduce transaction type from included fields
            if self.initcodes is not None:
                self.ty = HexNumber(6)
            elif self.authorization_list is not None:
                self.ty = HexNumber(4)
            elif self.max_fee_per_blob_gas is not None or self.blob_versioned_hashes is not None:
                self.ty = HexNumber(3)
            elif self.max_fee_per_gas is not None or self.max_priority_fee_per_gas is not None:
                self.ty = HexNumber(2)
            elif self.access_list is not None:
                self.ty = HexNumber(1)
            else:
                self.ty = HexNumber(0)

        if "v" in self.model_fields_set and self.secret_key is not None:
            raise Transaction.InvalidSignaturePrivateKeyError()

        # Unsigned and key-less: fall back to the sender's key or test key.
        if "v" not in self.model_fields_set and self.secret_key is None:
            if self.sender is not None:
                self.secret_key = self.sender.key
            else:
                self.secret_key = Hash(TestPrivateKey)
                self.sender = EOA(address=TestAddress, key=self.secret_key, nonce=0)

        # Set default values for fields that are required for certain tx types
        if self.ty <= 1 and self.gas_price is None:
            self.gas_price = HexNumber(TransactionDefaults.gas_price)
        if self.ty >= 1 and self.access_list is None:
            self.access_list = []
        if self.ty < 1:
            assert self.access_list is None, "access_list must be None"

        if self.ty >= 2 and self.max_fee_per_gas is None:
            self.max_fee_per_gas = HexNumber(TransactionDefaults.max_fee_per_gas)
        if self.ty >= 2 and self.max_priority_fee_per_gas is None:
            self.max_priority_fee_per_gas = HexNumber(TransactionDefaults.max_priority_fee_per_gas)
        if self.ty < 2:
            assert self.max_fee_per_gas is None, "max_fee_per_gas must be None"
            assert self.max_priority_fee_per_gas is None, "max_priority_fee_per_gas must be None"

        if self.ty == 3 and self.max_fee_per_blob_gas is None:
            self.max_fee_per_blob_gas = HexNumber(1)
        if self.ty != 3:
            assert self.blob_versioned_hashes is None, "blob_versioned_hashes must be None"
            assert self.max_fee_per_blob_gas is None, "max_fee_per_blob_gas must be None"

        if self.ty == 4 and self.authorization_list is None:
            self.authorization_list = []
        if self.ty != 4:
            assert self.authorization_list is None, "authorization_list must be None"

        if self.ty == 6 and self.initcodes is None:
            self.initcodes = []
        if self.ty != 6:
            assert self.initcodes is None, "initcodes must be None"

        # Consume the sender's locally-tracked nonce if none was given.
        if "nonce" not in self.model_fields_set and self.sender is not None:
            self.nonce = HexNumber(self.sender.get_nonce())

    def with_error(
        self, error: List[TransactionException] | TransactionException
    ) -> "Transaction":
        """Create a copy of the transaction with an added error."""
        return self.copy(error=error)

    def with_nonce(self, nonce: int) -> "Transaction":
        """Create a copy of the transaction with a modified nonce."""
        return self.copy(nonce=nonce)

    @cached_property
    def signature_bytes(self) -> Bytes:
        """Returns the serialized bytes of the transaction signature."""
        assert "v" in self.model_fields_set, "transaction must be signed"
        v = int(self.v)
        if self.ty == 0:
            # Undo the legacy / EIP-155 v encoding to recover the raw
            # recovery id (0 or 1).
            if self.protected:
                assert self.chain_id is not None
                v -= 35 + (self.chain_id * 2)
            else:
                v -= 27
        return Bytes(
            self.r.to_bytes(32, byteorder="big")
            + self.s.to_bytes(32, byteorder="big")
            + bytes([v])
        )

    def sign(self: "Transaction") -> None:
        """Signs the authorization tuple with a private key."""
        signature_bytes: bytes | None = None
        rlp_signing_bytes = self.rlp_signing_bytes()
        # Only sign if no signature component was explicitly provided.
        if (
            "v" not in self.model_fields_set
            and "r" not in self.model_fields_set
            and "s" not in self.model_fields_set
        ):
            signing_key: Hash | None = None
            if self.secret_key is not None:
                signing_key = self.secret_key
                # Key is consumed once the signature is produced.
                self.secret_key = None
            elif self.sender is not None:
                eoa = self.sender
                assert eoa is not None, "signer must be set"
                signing_key = eoa.key
            assert signing_key is not None, "secret_key or signer must be set"

            # Recoverable signature layout: r (32) | s (32) | recovery id (1).
            signature_bytes = PrivateKey(secret=signing_key).sign_recoverable(
                rlp_signing_bytes, hasher=keccak256
            )
            v, r, s = (
                signature_bytes[64],
                int.from_bytes(signature_bytes[0:32], byteorder="big"),
                int.from_bytes(signature_bytes[32:64], byteorder="big"),
            )
            if self.ty == 0:
                # Apply legacy / EIP-155 v encoding.
                if self.protected:
                    v += 35 + (self.chain_id * 2)
                else:  # not protected
                    v += 27

            self.v, self.r, self.s = (HexNumber(v), HexNumber(r), HexNumber(s))
            self.model_fields_set.add("v")
            self.model_fields_set.add("r")
            self.model_fields_set.add("s")

        if self.sender is None:
            # Best-effort sender recovery from the signature components.
            try:
                if not signature_bytes:
                    v = self.v
                    if self.ty == 0:
                        if v > 28:
                            v -= 35 + (self.chain_id * 2)
                        else:  # not protected
                            v -= 27
                    signature_bytes = (
                        int(self.r).to_bytes(32, byteorder="big")
                        + int(self.s).to_bytes(32, byteorder="big")
                        + bytes([v])
                    )
                public_key = PublicKey.from_signature_and_message(
                    signature_bytes, rlp_signing_bytes.keccak256(), hasher=None
                )
                self.sender = EOA(
                    address=Address(keccak256(public_key.format(compressed=False)[1:])[32 - 20 :])
                )
            except Exception:
                # Signer remains `None` in this case
                pass

    def with_signature_and_sender(self, *, keep_secret_key: bool = False) -> "Transaction":
        """Return signed version of the transaction using the private key."""
        updated_values: Dict[str, Any] = {}

        if (
            "v" in self.model_fields_set
            or "r" in self.model_fields_set
            or "s" in self.model_fields_set
        ):
            # Transaction already signed
            if self.sender is not None:
                return self

            public_key = PublicKey.from_signature_and_message(
                self.signature_bytes, self.rlp_signing_bytes().keccak256(), hasher=None
            )
            updated_values["sender"] = Address(
                keccak256(public_key.format(compressed=False)[1:])[32 - 20 :]
            )
            return self.copy(**updated_values)

        if self.secret_key is None:
            raise ValueError("secret_key must be set to sign a transaction")

        # Get the signing bytes
        signing_hash = self.rlp_signing_bytes().keccak256()

        # Sign the bytes
        signature_bytes = PrivateKey(secret=self.secret_key).sign_recoverable(
            signing_hash, hasher=None
        )
        public_key = PublicKey.from_signature_and_message(
            signature_bytes, signing_hash, hasher=None
        )

        sender = keccak256(public_key.format(compressed=False)[1:])[32 - 20 :]
        updated_values["sender"] = Address(sender)

        v, r, s = (
            signature_bytes[64],
            int.from_bytes(signature_bytes[0:32], byteorder="big"),
            int.from_bytes(signature_bytes[32:64], byteorder="big"),
        )
        if self.ty == 0:
            # Apply legacy / EIP-155 v encoding.
            if self.protected:
                v += 35 + (self.chain_id * 2)
            else:  # not protected
                v += 27

        updated_values["v"] = HexNumber(v)
        updated_values["r"] = HexNumber(r)
        updated_values["s"] = HexNumber(s)

        updated_values["secret_key"] = None

        updated_tx: "Transaction" = self.model_copy(update=updated_values)

        # Remove the secret key if requested
        if keep_secret_key:
            updated_tx.secret_key = self.secret_key
        return updated_tx

    def get_rlp_signing_fields(self) -> List[str]:
        """
        Return the list of values included in the envelope used for signing
        depending on the transaction type.
        """
        field_list: List[str]
        if self.ty == 6:
            # EIP-7873: https://eips.ethereum.org/EIPS/eip-7873
            field_list = [
                "chain_id",
                "nonce",
                "max_priority_fee_per_gas",
                "max_fee_per_gas",
                "gas_limit",
                "to",
                "value",
                "data",
                "access_list",
                "initcodes",
            ]
        elif self.ty == 4:
            # EIP-7702: https://eips.ethereum.org/EIPS/eip-7702
            field_list = [
                "chain_id",
                "nonce",
                "max_priority_fee_per_gas",
                "max_fee_per_gas",
                "gas_limit",
                "to",
                "value",
                "data",
                "access_list",
                "authorization_list",
            ]
        elif self.ty == 3:
            # EIP-4844: https://eips.ethereum.org/EIPS/eip-4844
            field_list = [
                "chain_id",
                "nonce",
                "max_priority_fee_per_gas",
                "max_fee_per_gas",
                "gas_limit",
                "to",
                "value",
                "data",
                "access_list",
                "max_fee_per_blob_gas",
                "blob_versioned_hashes",
            ]
        elif self.ty == 2:
            # EIP-1559: https://eips.ethereum.org/EIPS/eip-1559
            field_list = [
                "chain_id",
                "nonce",
                "max_priority_fee_per_gas",
                "max_fee_per_gas",
                "gas_limit",
                "to",
                "value",
                "data",
                "access_list",
            ]
        elif self.ty == 1:
            # EIP-2930: https://eips.ethereum.org/EIPS/eip-2930
            field_list = [
                "chain_id",
                "nonce",
                "gas_price",
                "gas_limit",
                "to",
                "value",
                "data",
                "access_list",
            ]
        elif self.ty == 0:
            field_list = ["nonce", "gas_price", "gas_limit", "to", "value", "data"]

            if self.protected:
                # EIP-155: https://eips.ethereum.org/EIPS/eip-155
                field_list.extend(["chain_id", "zero", "zero"])
        else:
            raise NotImplementedError(f"signing for transaction type {self.ty} not implemented")

        # `to` is the only field allowed to be None (contract creation).
        for field in field_list:
            if field != "to":
                assert getattr(self, field) is not None, (
                    f"{field} must be set for type {self.ty} tx"
                )
        return field_list

    def get_rlp_fields(self) -> List[str]:
        """
        Return the list of values included in the list used for rlp encoding
        depending on the transaction type.
        """
        fields = self.get_rlp_signing_fields()
        if self.ty == 0 and self.protected:
            # Drop the EIP-155 signing-only suffix ("chain_id", 0, 0).
            fields = fields[:-3]
        return fields + ["v", "r", "s"]

    def get_rlp_prefix(self) -> bytes:
        """
        Return the transaction type as bytes to be appended at the beginning of
        the serialized transaction if type is not 0.
        """
        if self.ty > 0:
            return bytes([self.ty])
        return b""

    def get_rlp_signing_prefix(self) -> bytes:
        """
        Return the transaction type as bytes to be appended at the beginning of
        the serialized transaction signing envelope if type is not 0.
        """
        if self.ty > 0:
            return bytes([self.ty])
        return b""

    def metadata_string(self) -> str | None:
        """Return the metadata field as a formatted json string or None."""
        if self.metadata is None:
            return None
        return self.metadata.to_json()

    @cached_property
    def hash(self) -> Hash:
        """Returns hash of the transaction."""
        return self.rlp().keccak256()

    @cached_property
    def serializable_list(self) -> Any:
        """
        Return list of values included in the transaction as a serializable
        object.
        """
        # Typed transactions serialize as opaque bytes; legacy as a list.
        return self.rlp() if self.ty > 0 else self.to_list(signing=False)

    @staticmethod
    def list_root(input_txs: List["Transaction"]) -> Hash:
        """Return transactions root of a list of transactions."""
        t = HexaryTrie(db={})
        for i, tx in enumerate(input_txs):
            t.set(eth_rlp.encode(Uint(i)), tx.rlp())
        return Hash(t.root_hash)

    @staticmethod
    def list_blob_versioned_hashes(input_txs: List["Transaction"]) -> List[Hash]:
        """
        Get list of ordered blob versioned hashes from a list of transactions.
        """
        return [
            blob_versioned_hash
            for tx in input_txs
            if tx.blob_versioned_hashes is not None
            for blob_versioned_hash in tx.blob_versioned_hashes
        ]

    @cached_property
    def created_contract(self) -> Address:
        """Return address of the contract created by the transaction."""
        if self.to is not None:
            raise ValueError("transaction is not a contract creation")
        if self.sender is None:
            raise ValueError("sender address is None")
        # CREATE address: last 20 bytes of keccak256(rlp([sender, nonce])).
        hash_bytes = Bytes(eth_rlp.encode([self.sender, int_to_bytes(self.nonce)])).keccak256()
        return Address(hash_bytes[-20:])
class NetworkWrappedTransaction(CamelModel, RLPSerializable):
    """
    Network wrapped transaction as defined in
    [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#networking).

    < Osaka: rlp([tx_payload_body, blobs, commitments, proofs])
    >= Osaka: rlp([tx_payload_body, wrapper_version, blobs, commitments,
    cell_proofs])
    """

    tx: Transaction
    blob_objects: Sequence[Blob]
    wrapper_version: int | None = None  # only exists in >= osaka

    @computed_field  # type: ignore[prop-decorator]
    @property
    def blobs(self) -> Sequence[Bytes]:
        """Return a list of blob data as bytes."""
        return [blob.data for blob in self.blob_objects]

    @computed_field  # type: ignore[prop-decorator]
    @property
    def commitments(self) -> Sequence[Bytes]:
        """Return a list of kzg commitments."""
        return [blob.commitment for blob in self.blob_objects]

    @computed_field  # type: ignore[prop-decorator]
    @property
    def proofs(self) -> Sequence[Bytes] | None:
        """Return a list of kzg proofs (returns None >= Osaka)."""
        # A set wrapper_version marks the Osaka-style wrapper, where the
        # flat per-blob proof list does not apply.
        if self.wrapper_version is not None:
            return None
        proofs: list[Bytes] = []
        for blob in self.blob_objects:
            # Pre-Osaka each blob carries exactly one proof as Bytes.
            assert isinstance(blob.proof, Bytes)
            proofs.append(blob.proof)
        return proofs

    @computed_field  # type: ignore[prop-decorator]
    @property
    def cell_proofs(self) -> Sequence[Bytes] | None:
        """Return a list of cells (returns None < Osaka)."""
        if self.wrapper_version is None:
            return None
        cells: list[Bytes] = []
        for blob in self.blob_objects:
            # >= Osaka each blob carries a list of cell proofs; they are
            # flattened across all blobs in order.
            assert isinstance(blob.proof, list)
            cells.extend(blob.proof)
        return cells

    def get_rlp_fields(self) -> List[str]:
        """
        Return an ordered list of field names to be included in RLP
        serialization.

        Function can be overridden to customize the logic to return the fields.

        By default, rlp_fields class variable is used.

        The list can be nested list up to one extra level to represent nested
        fields.
        """
        # only put a wrapper_version field for >=osaka (value 1), otherwise
        # omit field
        wrapper = []
        if self.wrapper_version is not None:
            wrapper = ["wrapper_version"]
        # Exactly one of the two proof representations is non-None, selected
        # by wrapper_version in the computed properties above.
        rlp_proofs: list[str] = []
        if self.proofs is not None:
            rlp_proofs = ["proofs"]
        rlp_cell_proofs: list[str] = []
        if self.cell_proofs is not None:
            rlp_cell_proofs = ["cell_proofs"]

        rlp_fields: List[str] = [  # structure explained in
            # https://eips.ethereum.org/EIPS/eip-7594#Networking
            "tx",  # tx_payload_body
            *wrapper,  # wrapper_version, which is always 1 for osaka (was non-
            # existing before)
            "blobs",  # Blob.data
            "commitments",
            *rlp_proofs,
            *rlp_cell_proofs,
        ]

        # Sanity check: at least one proof field set must have been selected.
        assert ("proofs" in rlp_fields) or ("cell_proofs" in rlp_fields), (
            "Neither proofs nor cell_proofs are in rlp_fields. Critical error!"
        )
        # logger.debug(f"Ended up with this rlp field list: {rlp_fields}")
        return rlp_fields

    def get_rlp_prefix(self) -> bytes:
        """
        Return the transaction type as bytes to be appended at the beginning of
        the serialized transaction if type is not 0.
        """
        if self.tx.ty > 0:
            return bytes([self.tx.ty])
        return b""
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/request_types.py | src/ethereum_test_types/request_types.py | """Request types for Ethereum tests."""
from abc import abstractmethod
from collections import defaultdict
from typing import ClassVar, Dict, List, SupportsBytes
from ethereum_test_base_types import (
Address,
BLSPublicKey,
BLSSignature,
Bytes,
CamelModel,
Hash,
HexNumber,
)
class RequestBase:
    """Base class for requests."""

    # One-byte request type identifier; set by each concrete subclass and
    # used by `Requests` to group and prefix serialized request lists.
    type: ClassVar[int]

    @abstractmethod
    def __bytes__(self) -> bytes:
        """Return request's attributes as bytes."""
        ...
class DepositRequest(RequestBase, CamelModel):
    """Deposit Request type."""

    pubkey: BLSPublicKey
    """The public key of the beacon chain validator."""
    withdrawal_credentials: Hash
    """The withdrawal credentials of the beacon chain validator."""
    amount: HexNumber
    """The amount in gwei of the deposit."""
    signature: BLSSignature
    """
    The signature of the deposit using the validator's private key that matches
    the `pubkey`.
    """
    index: HexNumber
    """The index of the deposit."""

    type: ClassVar[int] = 0

    def __bytes__(self) -> bytes:
        """Serialize the deposit as the concatenation of its fields."""
        parts = (
            bytes(self.pubkey),
            bytes(self.withdrawal_credentials),
            self.amount.to_bytes(8, "little"),
            bytes(self.signature),
            self.index.to_bytes(8, "little"),
        )
        return b"".join(parts)
class WithdrawalRequest(RequestBase, CamelModel):
    """Withdrawal Request type."""

    source_address: Address = Address(0)
    """
    The address of the execution layer account that made the withdrawal
    request.
    """
    validator_pubkey: BLSPublicKey
    """
    The current public key of the validator as it currently is in the beacon
    state.
    """
    amount: HexNumber
    """The amount in gwei to be withdrawn on the beacon chain."""

    type: ClassVar[int] = 1

    def __bytes__(self) -> bytes:
        """Serialize the withdrawal request as the concatenation of its fields."""
        parts = (
            bytes(self.source_address),
            bytes(self.validator_pubkey),
            self.amount.to_bytes(8, "little"),
        )
        return b"".join(parts)
class ConsolidationRequest(RequestBase, CamelModel):
    """Consolidation Request type."""

    source_address: Address = Address(0)
    """
    The address of the execution layer account that made the consolidation
    request.
    """
    source_pubkey: BLSPublicKey
    """
    The public key of the source validator as it currently is in the beacon
    state.
    """
    target_pubkey: BLSPublicKey
    """
    The public key of the target validator as it currently is in the beacon
    state.
    """

    type: ClassVar[int] = 2

    def __bytes__(self) -> bytes:
        """Serialize the consolidation request as the concatenation of its fields."""
        parts = (
            bytes(self.source_address),
            bytes(self.source_pubkey),
            bytes(self.target_pubkey),
        )
        return b"".join(parts)
def requests_list_to_bytes(requests_list: List[RequestBase] | Bytes | SupportsBytes) -> Bytes:
    """Serialize a list of requests; byte-like input is passed through as Bytes."""
    if isinstance(requests_list, list):
        return Bytes(b"".join(bytes(r) for r in requests_list))
    return Bytes(requests_list)
class Requests:
    """Requests for the transition tool."""

    # Each element is one serialized request collection; when built from
    # individual requests, each element is prefixed with its type byte.
    requests_list: List[Bytes]

    def __init__(
        self,
        *requests: RequestBase,
        requests_lists: List[List[RequestBase] | Bytes] | None = None,
    ) -> None:
        """Initialize requests object."""
        if requests_lists is not None:
            # Pre-built lists are taken as-is (no type-byte prefix added).
            assert len(requests) == 0, "requests must be empty if list is provided"
            self.requests_list = []
            for requests_list in requests_lists:
                self.requests_list.append(requests_list_to_bytes(requests_list))
            return
        else:
            # Group individual requests by type; serialize each group as
            # type byte + concatenated request payloads, in ascending
            # type order.
            lists: Dict[int, List[RequestBase]] = defaultdict(list)
            for r in requests:
                lists[r.type].append(r)

            self.requests_list = [
                Bytes(bytes([request_type]) + requests_list_to_bytes(lists[request_type]))
                for request_type in sorted(lists.keys())
            ]

    def __bytes__(self) -> bytes:
        """Return requests hash."""
        # sha256 over the concatenation of each per-type list's sha256.
        s: bytes = b"".join(r.sha256() for r in self.requests_list)
        return Bytes(s).sha256()
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/helpers.py | src/ethereum_test_types/helpers.py | """Helper functions/classes used to generate Ethereum tests."""
from typing import List, SupportsBytes
import ethereum_rlp as eth_rlp
from pydantic import BaseModel, ConfigDict
from ethereum_test_base_types.base_types import Address, Bytes, Hash
from ethereum_test_base_types.conversions import BytesConvertible, FixedSizeBytesConvertible
from ethereum_test_vm import Opcodes as Op
from .account_types import EOA
from .utils import int_to_bytes
"""
Helper functions
"""
def ceiling_division(a: int, b: int) -> int:
    """
    Calculate ceil without using floating point. Used by many of the EVM's
    formulas.
    """
    quotient, remainder = divmod(a, b)
    # Any nonzero remainder means the true quotient lies above the floor.
    return quotient + 1 if remainder else quotient
def compute_create_address(
    *,
    address: FixedSizeBytesConvertible | EOA,
    nonce: int | None = None,
    salt: int = 0,
    initcode: BytesConvertible = b"",
    opcode: Op = Op.CREATE,
) -> Address:
    """
    Compute address of the resulting contract created using a transaction or
    the `CREATE` opcode.

    For `Op.CREATE`, `nonce` defaults to the EOA's tracked nonce (when an
    `EOA` is given) or 0. `salt` and `initcode` are only used for
    `Op.CREATE2`.

    Raises:
        ValueError: If `opcode` is neither CREATE nor CREATE2.
    """
    if opcode == Op.CREATE:
        if isinstance(address, EOA):
            if nonce is None:
                nonce = address.nonce
        else:
            address = Address(address)
        if nonce is None:
            nonce = 0
        # CREATE address: last 20 bytes of keccak256(rlp([sender, nonce])).
        hash_bytes = Bytes(eth_rlp.encode([address, int_to_bytes(nonce)])).keccak256()
        return Address(hash_bytes[-20:])
    if opcode == Op.CREATE2:
        return compute_create2_address(address, salt, initcode)
    raise ValueError("Unsupported opcode")
def compute_create2_address(
    address: FixedSizeBytesConvertible, salt: FixedSizeBytesConvertible, initcode: BytesConvertible
) -> Address:
    """
    Compute address of the resulting contract created using the `CREATE2`
    opcode.
    """
    # CREATE2: keccak256(0xff ++ address ++ salt ++ keccak256(initcode))[12:]
    initcode_hash = Bytes(initcode).keccak256()
    preimage = b"\xff" + Address(address) + Hash(salt) + initcode_hash
    return Address(Bytes(preimage).keccak256()[-20:])
def compute_eofcreate_address(
    address: FixedSizeBytesConvertible, salt: FixedSizeBytesConvertible
) -> Address:
    """
    Compute address of the resulting contract created using the `EOFCREATE`
    opcode.
    """
    # EOFCREATE: keccak256(0xff ++ 12 zero bytes ++ address ++ salt)[12:]
    preimage = b"\xff" + bytes(12) + Address(address) + Hash(salt)
    return Address(Bytes(preimage).keccak256()[-20:])
def add_kzg_version(
    b_hashes: List[bytes | SupportsBytes | int | str], kzg_version: int
) -> List[Hash]:
    """
    Add Kzg Version to each blob hash.

    Each input is normalized through `Hash` (which accepts bytes, int and
    str forms), then its first byte is replaced with `kzg_version`.

    Note: The previous implementation re-checked the input type *after*
    normalizing to `bytes`, which made the int/str branches and the
    `TypeError` fallback unreachable; this version keeps only the live path,
    with identical behavior.
    """
    kzg_version_hex = bytes([kzg_version])
    return [Hash(kzg_version_hex + bytes(Hash(b_hash))[1:]) for b_hash in b_hashes]
class TestParameterGroup(BaseModel):
    """
    Base class for grouping test parameters in a dataclass. Provides a generic
    __repr__ method to generate clean test ids, including only non-default
    optional fields.
    """

    __test__ = False  # explicitly prevent pytest collecting this class
    model_config = ConfigDict(frozen=True, validate_default=True)

    def __repr__(self) -> str:
        """
        Generate repr string, intended to be used as a test id, based on the
        class name and the values of the non-default optional fields.
        """
        class_name = self.__class__.__name__
        field_strings = [
            f"{field}_{value}"
            # Include only fields that were explicitly set and differ from
            # their defaults (pydantic exclude_defaults/exclude_unset).
            for field, value in self.model_dump(exclude_defaults=True, exclude_unset=True).items()
        ]

        return f"{class_name}_{'-'.join(field_strings)}"
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/utils.py | src/ethereum_test_types/utils.py | """Utility functions and sentinel classes for Ethereum test types."""
from typing import Any
from ethereum_test_base_types import Bytes, Hash
def keccak256(data: bytes) -> Hash:
    """Calculate keccak256 hash of the given data."""
    # Thin convenience wrapper over Bytes.keccak256 for raw `bytes` input.
    return Bytes(data).keccak256()
def int_to_bytes(value: int) -> bytes:
    """Convert integer to its big-endian representation."""
    # Minimal big-endian form: zero encodes to the empty byte string.
    return value.to_bytes((value.bit_length() + 7) // 8, "big")
# Sentinel classes
class Removable:
    """
    Sentinel class to detect if a parameter should be removed.
    (`None` normally means "do not modify").
    """

    def __eq__(self, other: Any) -> bool:
        """All `Removable` instances compare equal to each other."""
        if isinstance(other, Removable):
            return True
        # Defer to the other operand for non-Removable comparisons.
        return NotImplemented
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/blob_types.py | src/ethereum_test_types/blob_types.py | """Blob-related types for Ethereum tests."""
import random
from enum import Enum
from hashlib import sha256
from os.path import realpath
from pathlib import Path
from typing import Any, ClassVar, List, Literal, cast
import ckzg # type: ignore
import platformdirs
from filelock import FileLock
from ethereum_test_base_types.base_types import Bytes, Hash
from ethereum_test_base_types.pydantic import CamelModel
from ethereum_test_forks import Fork
from pytest_plugins.custom_logging import get_logger
# Per-user on-disk cache where generated blobs are persisted as JSON files
# (see Blob.write_to_file / Blob.from_file).
CACHED_BLOBS_DIRECTORY: Path = (
    Path(platformdirs.user_cache_dir("ethereum-execution-spec-tests")) / "cached_blobs"
)

logger = get_logger(__name__)
def clear_blob_cache(cached_blobs_folder_path: Path) -> None:
    """Delete all cached blobs."""
    if not cached_blobs_folder_path.is_dir():
        return

    for cached_file in list(cached_blobs_folder_path.glob("*.json")):
        try:
            # Hold this blob's lock while unlinking to avoid racing a
            # concurrent reader or writer.
            with FileLock(cached_file.with_suffix(".lock")):
                cached_file.unlink()
        except Exception as e:
            print(
                f"Error while trying to delete file {cached_file}:{e}. "
                "Aborting clearing of blob cache folder."
            )
            return
class Blob(CamelModel):
    """Class representing a full blob."""

    data: Bytes
    commitment: Bytes
    proof: List[Bytes] | Bytes  # Bytes < Osaka, List[Bytes] >= Osaka
    # None (in json: null) < Osaka, List[Bytes] >= Osaka
    cells: List[Bytes] | None
    versioned_hash: Hash
    name: str  # cache filename for this blob, see get_filename()
    fork: Fork
    seed: int  # RNG seed used to generate `data` deterministically
    timestamp: int  # fork transitions require timestamp >= 15000 to occur

    # Lazily-loaded KZG trusted setup shared by all Blob instances.
    _trusted_setup: ClassVar[Any | None] = None

    @classmethod
    def trusted_setup(cls) -> Any:
        """Set trusted setup if it is not already set."""
        if cls._trusted_setup is None:
            trusted_setup_path = Path(realpath(__file__)).parent / "kzg_trusted_setup.txt"
            trusted_setup = ckzg.load_trusted_setup(str(trusted_setup_path), 0)
            cls._trusted_setup = trusted_setup
        return cls._trusted_setup

    @staticmethod
    def get_filename(fork: Fork, seed: int) -> str:
        """
        Return filename this blob would have as string (with .json extension).
        """
        # The cell-proof count is part of the name so pre- and post-Osaka
        # blobs generated from the same seed do not collide in the cache.
        amount_cell_proofs: int = cast(int, fork.get_blob_constant("AMOUNT_CELL_PROOFS"))
        return "blob_" + str(seed) + "_cell_proofs_" + str(amount_cell_proofs) + ".json"

    @staticmethod
    def get_filepath(fork: Fork, seed: int) -> Path:
        """
        Return the Path to the blob that would be created with these
        parameters.
        """
        # determine amount of cell proofs for this fork (0 or 128)
        would_be_filename: str = Blob.get_filename(fork, seed)

        # return path to blob
        return CACHED_BLOBS_DIRECTORY / would_be_filename

    @staticmethod
    def from_fork(fork: Fork, seed: int = 0, timestamp: int = 0) -> "Blob":
        """
        Construct Blob instances. Fork logic is encapsulated within nested
        functions.
        """

        def generate_blob_data(rng_seed: int = 0) -> Bytes:
            """Calculate blob data deterministically via provided seed."""
            # create local (independent) RNG object seeded with rng_seed
            rng = random.Random(rng_seed)

            # generate blob: one random field element at a time, each encoded
            # with the fork's field-element width and endianness
            ints: list[int] = [
                rng.randrange(cast(int, fork.get_blob_constant("BLS_MODULUS")))
                for _ in range(cast(int, fork.get_blob_constant("FIELD_ELEMENTS_PER_BLOB")))
            ]
            encoded: list[bytes] = [
                i.to_bytes(
                    cast(int, fork.get_blob_constant("BYTES_PER_FIELD_ELEMENT")),
                    cast(Literal["big"], fork.get_blob_constant("KZG_ENDIANNESS")),
                )
                for i in ints
            ]
            blob: bytes = b"".join(encoded)  # without 0x
            return Bytes(blob)

        def get_versioned_hash(commitment: Bytes, version: int = 1) -> Hash:
            """Calculate versioned hash for a given blob."""
            # version byte followed by the tail of sha256(commitment)
            return Hash(bytes([version]) + sha256(commitment).digest()[1:])

        def get_commitment(data: Bytes) -> Bytes:
            """
            Take a blob and returns a cryptographic commitment to it.

            Note: Each cell holds the exact same copy of this commitment.
            """
            # sanity check
            field_elements: int = cast(int, fork.get_blob_constant("FIELD_ELEMENTS_PER_BLOB"))
            bytes_per_field: int = cast(int, fork.get_blob_constant("BYTES_PER_FIELD_ELEMENT"))
            assert len(data) == field_elements * bytes_per_field, (
                f"Expected blob of length "
                f"{field_elements * bytes_per_field} but got blob of length {len(data)}"
            )

            # calculate commitment
            commitment = ckzg.blob_to_kzg_commitment(data, Blob.trusted_setup())
            assert len(commitment) == fork.get_blob_constant("BYTES_PER_COMMITMENT"), (
                f"Expected {fork.get_blob_constant('BYTES_PER_COMMITMENT')} "
                f"resulting commitments but got {len(commitment)} commitments"
            )
            return commitment

        def get_proof(fork: Fork, data: Bytes) -> List[Bytes] | Bytes:
            # determine whether this fork is <osaka or >= osaka by looking at
            # amount of cell_proofs
            amount_cell_proofs = fork.get_blob_constant("AMOUNT_CELL_PROOFS")

            # cancun, prague: a single KZG proof at an arbitrary valid point z
            if amount_cell_proofs == 0:
                z = 2  # 2 is one of many possible valid field elements z
                # https://github.com/ethereum/consensus-specs/blob/ad884507f
                # 7a1d5962cd3dfb5f7b3e41aab728c55/tests/core/pyspec/eth2spec/
                # test/utils/kzg_tests.py#L58-L66)
                z_valid_size: bytes = z.to_bytes(
                    cast(int, fork.get_blob_constant("BYTES_PER_FIELD_ELEMENT")), byteorder="big"
                )
                proof, _ = ckzg.compute_kzg_proof(data, z_valid_size, Blob.trusted_setup())
                return proof

            # >=osaka: one proof per cell
            if amount_cell_proofs == 128:
                _, proofs = ckzg.compute_cells_and_kzg_proofs(
                    data, Blob.trusted_setup()
                )  # returns List[byte] of length 128
                return proofs

            raise AssertionError(
                f"get_proof() has not been implemented yet for fork: {fork.name()}."
                f"Got amount of cell proofs {amount_cell_proofs} but expected 128."
            )

        def get_cells(fork: Fork, data: Bytes) -> List[Bytes] | None:
            # determine whether this fork is <osaka or >= osaka by looking at
            # amount of cell_proofs
            amount_cell_proofs = fork.get_blob_constant("AMOUNT_CELL_PROOFS")

            # cancun, prague: no cells exist pre-Osaka
            if amount_cell_proofs == 0:
                return None

            # >=osaka
            if amount_cell_proofs == 128:
                cells, _ = ckzg.compute_cells_and_kzg_proofs(
                    data, Blob.trusted_setup()
                )  # returns List[byte] of length 128
                return cells  # List[bytes]

            raise AssertionError(
                f"get_cells() has not been implemented yet for fork: {fork.name()}. Got amount of "
                f"cell proofs {amount_cell_proofs} but expected 128."
            )

        # first, create cached blobs dir if necessary
        if not CACHED_BLOBS_DIRECTORY.exists():
            CACHED_BLOBS_DIRECTORY.mkdir(
                parents=True, exist_ok=True
            )  # create all necessary dirs on the way

        # handle transition forks (blob related constants are needed and only
        # available for normal forks)
        fork = fork.fork_at(timestamp=timestamp)

        # if this blob already exists then load from file. use lock
        blob_location: Path = Blob.get_filepath(fork, seed)

        # use lock to avoid race conditions
        lock_file_path = blob_location.with_suffix(".lock")
        with FileLock(lock_file_path):
            if blob_location.exists():
                logger.debug(f"Blob exists already, reading it from file {blob_location}")
                return Blob.from_file(Blob.get_filename(fork, seed))

            assert fork.supports_blobs(), f"Provided fork {fork.name()} does not support blobs!"

            # get data for blob parameters
            data: Bytes = generate_blob_data(seed)
            commitment: Bytes = get_commitment(data)
            proof: List[Bytes] | Bytes = get_proof(fork, data)
            cells: List[Bytes] | None = get_cells(fork, data)
            versioned_hash: Hash = get_versioned_hash(commitment)
            name: str = Blob.get_filename(fork, seed)

            blob = Blob(
                data=data,
                commitment=commitment,
                proof=proof,
                cells=cells,
                versioned_hash=versioned_hash,
                name=name,
                fork=fork,
                seed=seed,
                timestamp=timestamp,
            )

            # for most effective caching temporarily persist every blob that is
            # created in cache
            blob.write_to_file()

            return blob

    @staticmethod
    def from_file(file_name: str) -> "Blob":
        """
        Read a .json file and reconstruct object it represents.

        You can load a blob only via its filename (with or without .json
        extension).
        """
        # ensure filename was passed
        assert file_name.startswith("blob_"), (
            f"You provided an invalid blob filename. Expected it to start with 'blob_' "
            f"but got: {file_name}"
        )

        if ".json" not in file_name:
            file_name = file_name + ".json"

        # determine path where this blob would be stored if it existed
        blob_file_location = CACHED_BLOBS_DIRECTORY / file_name

        # check whether blob exists
        assert blob_file_location.exists(), (
            f"Tried to load blob from file but {blob_file_location} does not exist"
        )

        # read blob from file
        with open(blob_file_location, "r", encoding="utf-8") as f:
            json_str: str = f.read()

        # reconstruct and return blob object
        return Blob.model_validate_json(json_str)

    def write_to_file(self) -> None:
        """Take a blob object, serialize it and write it to disk as json."""
        json_str = self.model_dump_json()
        output_location = Blob.get_filepath(self.fork, self.seed)

        # use lock to avoid race conditions
        lock_file_path = output_location.with_suffix(".lock")
        with FileLock(lock_file_path):
            # warn if existing static_blob gets overwritten
            if output_location.exists():
                logger.debug(f"Blob {output_location} already exists. It will be overwritten.")

            # overwrite existing
            with open(output_location, "w", encoding="utf-8") as f:
                f.write(json_str)

    def verify_cell_kzg_proof_batch(self, cell_indices: list) -> bool:
        """
        Check whether all cell proofs are valid and returns True only if that
        is the case.
        """
        amount_cell_proofs: int = cast(int, self.fork.get_blob_constant("AMOUNT_CELL_PROOFS"))
        assert amount_cell_proofs > 0, (
            f"verify_cell_kzg_proof_batch() is not available for your fork: {self.fork.name()}."
        )

        assert self.cells is not None, "self.cells is None, critical error."
        assert len(cell_indices) == len(self.cells), (
            f"Cell Indices list (detected length {len(cell_indices)}) and Cell list "
            f"(detected length {len(self.cells)}) should have same length."
        )

        # each cell refers to the same commitment
        commitments: list[bytes] = [self.commitment] * len(cell_indices)

        is_valid = ckzg.verify_cell_kzg_proof_batch(
            commitments, cell_indices, self.cells, self.proof, Blob.trusted_setup()
        )
        return is_valid

    def delete_cells_then_recover_them(self, deletion_indices: list[int]) -> None:
        """
        Simulate the cell recovery process in user-specified scenario.

        Note: Requirement for successful reconstruction is having at least N of
        the 2N cells.

        Theoretical Usage: You pass a cell list with to 128 elements to this
        function along with a list of deletion indices. These cells will be
        deleted and then the ckzg recovery mechanism is used to repair the
        missing cells. If no assertion is triggered the reconstruction was
        successful.
        """
        amount_cell_proofs: int = cast(int, self.fork.get_blob_constant("AMOUNT_CELL_PROOFS"))
        assert amount_cell_proofs > 0, (
            f"delete_cells_then_recover_them() is not available for fork: {self.fork.name()}"
        )

        assert self.cells is not None, "self.cells is None, critical problem."
        assert isinstance(self.proof, list), (
            "This function only works when self.proof is a list, but it seems to be "
            " of type bytes (not a list)"
        )
        assert len(self.cells) == 128, (
            f"You are supposed to pass a full cell list with 128 elements to this function, "
            f"but got list of length {len(self.cells)}"
        )
        assert len(deletion_indices) < 129, (
            f"You can't delete more than every cell (max len of deletion indices list is 128), "
            f"but you passed a deletion indices list of length {len(deletion_indices)}"
        )
        for i in deletion_indices:
            assert 0 <= i <= 127, f"Expected integers in range [0, 127], but got: {i}"

        # delete cells
        all_cell_indices: list[int] = list(range(128))
        remaining_indices: list[int] = [i for i in all_cell_indices if i not in deletion_indices]
        remaining_cells = [c for i, c in enumerate(self.cells) if i not in deletion_indices]

        recovered_cells, recovered_proofs = ckzg.recover_cells_and_kzg_proofs(
            remaining_indices, remaining_cells, Blob.trusted_setup()
        )  # on success returns two lists of len 128

        # determine success/failure: recovered lists must match the originals
        assert len(recovered_cells) == len(self.cells), (
            f"Failed to recover cell list. Original cell list had length {len(self.cells)} but "
            f"recovered cell list has length {len(recovered_cells)}"
        )
        assert len(recovered_proofs) == len(self.proof), (
            f"Failed to recover proofs list. Original proofs list had length {len(self.proof)} "
            f"but recovered proofs list has length {len(recovered_proofs)}"
        )

        for i in range(len(recovered_cells)):
            assert self.cells[i] == recovered_cells[i], (
                f"Failed to correctly restore missing cells. At index {i} original cell was "
                f"0x{self.cells[i].hex()} but reconstructed cell does not match: "
                f"0x{recovered_cells[i].hex()}"
            )
            assert self.proof[i] == recovered_proofs[i], (
                f"Failed to correctly restore missing proofs. At index {i} original proof was "
                f"0x{self.proof[i].hex()} but reconstructed proof does not match: "
                f"0x{recovered_proofs[i].hex()}"
            )

    class ProofCorruptionMode(Enum):
        """
        Define what the proof corruption modes do.

        For Osaka and later each Bytes object in the list is manipulated this
        way.
        """

        CORRUPT_FIRST_BYTE = 1  # corrupts a single byte (index 0)
        CORRUPT_LAST_BYTE = 2  # corrupts a single byte (last valid index)
        CORRUPT_TO_ALL_ZEROES = 3  # sets all proof bytes to 0
        CORRUPT_ALL_BYTES = 4  # corrupts all bytes

    def corrupt_proof(self, mode: ProofCorruptionMode) -> None:
        """Corrupt the proof field, supports different corruption modes."""

        def corrupt_byte(b: bytes) -> Bytes:
            """
            Bit-flip all bits of provided byte using XOR to guarantee change.
            """
            if len(b) != 1:
                raise ValueError("Input must be a single byte")
            return Bytes(bytes([b[0] ^ 0xFF]))

        # >=osaka: proof is a list of cell proofs; corrupt each element
        amount_cell_proofs: int = cast(int, self.fork.get_blob_constant("AMOUNT_CELL_PROOFS"))
        if amount_cell_proofs > 0:
            assert isinstance(self.proof, list), "proof was expected to be a list but it isn't"

            if mode == self.ProofCorruptionMode.CORRUPT_FIRST_BYTE:
                for i in range(len(self.proof)):
                    b: Bytes = self.proof[i]
                    corrupted: Bytes = Bytes(corrupt_byte(b[:1]) + b[1:])
                    self.proof[i] = corrupted
            elif mode == self.ProofCorruptionMode.CORRUPT_LAST_BYTE:
                for i in range(len(self.proof)):
                    b = self.proof[i]
                    corrupted = Bytes(b[:-1] + corrupt_byte(b[-1:]))
                    self.proof[i] = corrupted
            elif mode == self.ProofCorruptionMode.CORRUPT_TO_ALL_ZEROES:
                for i in range(len(self.proof)):
                    self.proof[i] = Bytes(bytes(len(self.proof[i])))
            elif mode == self.ProofCorruptionMode.CORRUPT_ALL_BYTES:
                for i in range(len(self.proof)):
                    b = self.proof[i]
                    corrupted_bytes = Bytes(b"".join(corrupt_byte(bytes([byte])) for byte in b))
                    self.proof[i] = corrupted_bytes
            return

        # pre-osaka (cancun and prague): proof is a single Bytes value
        assert amount_cell_proofs == 0, (
            f"You need to adjust corrupt_proof to handle fork {self.fork.name()}"
        )
        assert isinstance(self.proof, Bytes), "proof was expected to be Bytes but it isn't"

        if mode == self.ProofCorruptionMode.CORRUPT_FIRST_BYTE:
            self.proof = Bytes(corrupt_byte(self.proof[:1]) + self.proof[1:])
        elif mode == self.ProofCorruptionMode.CORRUPT_LAST_BYTE:
            self.proof = Bytes(self.proof[:-1] + corrupt_byte(self.proof[-1:]))
        elif mode == self.ProofCorruptionMode.CORRUPT_TO_ALL_ZEROES:
            self.proof = Bytes(bytes(len(self.proof)))
        elif mode == self.ProofCorruptionMode.CORRUPT_ALL_BYTES:
            self.proof = Bytes(b"".join(corrupt_byte(bytes([byte])) for byte in self.proof))
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/__init__.py | src/ethereum_test_types/__init__.py | """Common definitions and types."""
from .account_types import EOA, Alloc
from .blob_types import Blob
from .block_access_list import (
BalAccountChange,
BalBalanceChange,
BalCodeChange,
BalNonceChange,
BalStorageChange,
BalStorageSlot,
BlockAccessList,
BlockAccessListExpectation,
)
from .block_types import (
Environment,
EnvironmentDefaults,
Withdrawal,
)
from .chain_config_types import ChainConfig, ChainConfigDefaults
from .helpers import (
TestParameterGroup,
add_kzg_version,
ceiling_division,
compute_create2_address,
compute_create_address,
compute_eofcreate_address,
)
from .phase_manager import TestPhase, TestPhaseManager
from .receipt_types import TransactionReceipt
from .request_types import (
ConsolidationRequest,
DepositRequest,
Requests,
WithdrawalRequest,
)
from .transaction_types import (
AuthorizationTuple,
NetworkWrappedTransaction,
Transaction,
TransactionDefaults,
TransactionTestMetadata,
TransactionType,
)
from .utils import Removable, keccak256
__all__ = (
"Alloc",
"AuthorizationTuple",
"BalAccountChange",
"BalBalanceChange",
"BalCodeChange",
"BalNonceChange",
"BalStorageChange",
"BalStorageSlot",
"Blob",
"BlockAccessList",
"BlockAccessListExpectation",
"ChainConfig",
"ChainConfigDefaults",
"ConsolidationRequest",
"DepositRequest",
"Environment",
"EnvironmentDefaults",
"EOA",
"NetworkWrappedTransaction",
"Removable",
"Requests",
"TestParameterGroup",
"TestPhase",
"TestPhaseManager",
"Transaction",
"TransactionDefaults",
"TransactionReceipt",
"TransactionTestMetadata",
"TransactionType",
"Withdrawal",
"WithdrawalRequest",
"add_kzg_version",
"ceiling_division",
"compute_create_address",
"compute_create2_address",
"compute_eofcreate_address",
"keccak256",
)
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/trie.py | src/ethereum_test_types/trie.py | """
The state trie is the structure responsible for storing.
"""
import copy
from dataclasses import dataclass, field
from typing import (
Callable,
Dict,
Generic,
List,
Mapping,
MutableMapping,
Optional,
Sequence,
Tuple,
TypeVar,
cast,
)
from Crypto.Hash import keccak
from ethereum_rlp import Extended, rlp
from ethereum_types.bytes import Bytes, Bytes20, Bytes32
from ethereum_types.frozen import slotted_freezable
from ethereum_types.numeric import U256, Uint
from typing_extensions import assert_type
@slotted_freezable
@dataclass
class FrontierAccount:
    """State associated with an address."""

    nonce: Uint  # account nonce
    balance: U256  # account balance
    code: Bytes  # raw bytecode; encode_account stores only its keccak256 hash
def keccak256(buffer: Bytes) -> Bytes32:
    """Compute the keccak256 hash of the input `buffer`."""
    hasher = keccak.new(digest_bits=256)
    hasher.update(buffer)
    return Bytes32(hasher.digest())
def encode_account(raw_account_data: FrontierAccount, storage_root: Bytes) -> Bytes:
    """
    Encode `Account` dataclass.

    Storage is not stored in the `Account` dataclass, so `Accounts` cannot be
    encoded without providing a storage root.
    """
    # rlp([nonce, balance, storage_root, code_hash]) — code stored by hash.
    return rlp.encode(
        (
            raw_account_data.nonce,
            raw_account_data.balance,
            storage_root,
            keccak256(raw_account_data.code),
        )
    )
# note: an empty trie (regardless of whether it is secured) has root:
# keccak256(RLP(b'')) ==
# 56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421
# also:
# keccak256(RLP(())) ==
# 1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347
# which is the sha3Uncles hash in block header with no uncles
EMPTY_TRIE_ROOT = Bytes32(
bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")
)
Node = FrontierAccount | Bytes | Uint | U256 | None
K = TypeVar("K", bound=Bytes)
V = TypeVar(
"V",
Optional[FrontierAccount],
Bytes,
Uint,
U256,
)
@slotted_freezable
@dataclass
class LeafNode:
    """Leaf node in the Merkle Trie."""

    rest_of_key: Bytes  # remaining key nibbles below this node
    value: Extended
@slotted_freezable
@dataclass
class ExtensionNode:
    """Extension node in the Merkle Trie."""

    key_segment: Bytes  # shared key nibbles compressed into this node
    subnode: Extended  # reference to (or embedding of) the single child
BranchSubnodes = Tuple[
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
Extended,
]
@slotted_freezable
@dataclass
class BranchNode:
    """Branch node in the Merkle Trie."""

    subnodes: BranchSubnodes  # one child slot per nibble value (16 total)
    value: Extended  # value terminating exactly at this node, if any
InternalNode = LeafNode | ExtensionNode | BranchNode
def encode_internal_node(node: Optional[InternalNode]) -> Extended:
    """
    Encode a Merkle Trie node into its RLP form.

    The RLP will then be serialized into a `Bytes` and hashed unless it is less
    that 32 bytes when serialized.

    This function also accepts `None`, representing the absence of a node,
    which is encoded to `b""`.
    """
    unencoded: Extended
    match node:
        case None:
            unencoded = b""
        case LeafNode():
            # Leaf: compact-encoded remaining key (leaf flag set) + value.
            unencoded = (
                nibble_list_to_compact(node.rest_of_key, True),
                node.value,
            )
        case ExtensionNode():
            # Extension: compact-encoded shared key segment + child reference.
            unencoded = (
                nibble_list_to_compact(node.key_segment, False),
                node.subnode,
            )
        case BranchNode():
            # Branch: 16 child slots followed by the node's own value.
            unencoded = list(node.subnodes) + [node.value]
        case _:
            raise AssertionError(f"Invalid internal node type {type(node)}!")

    encoded = rlp.encode(unencoded)
    if len(encoded) < 32:
        # Small nodes are embedded directly in the parent instead of being
        # referenced by hash.
        return unencoded
    else:
        return keccak256(encoded)
def encode_node(node: Node, storage_root: Optional[Bytes] = None) -> Bytes:
    """
    Encode a Node for storage in the Merkle Trie.

    Currently mostly an unimplemented stub.

    Raises:
        AssertionError: For node types without an implemented encoding.
    """
    match node:
        case FrontierAccount():
            # Accounts need the storage root, which lives outside the
            # dataclass.
            assert storage_root is not None
            return encode_account(node, storage_root)
        case U256():
            return rlp.encode(node)
        case Bytes():
            # Raw bytes are stored as-is.
            return node
        case _:
            raise AssertionError(f"encoding for {type(node)} is not currently implemented")
@dataclass(slots=True)
class Trie(Generic[K, V]):
    """The Merkle Trie."""

    # Whether keys are hashed before use (secure trie) — not exercised in
    # this chunk; TODO confirm against the consumer.
    secured: bool
    # Value reported for keys absent from `_data`; never stored explicitly.
    default: V
    _data: Dict[K, V] = field(default_factory=dict)
def copy_trie(trie: Trie[K, V]) -> Trie[K, V]:
    """
    Return a copy of `trie`.

    Only frozen objects may be stored in tries, so the stored values are
    shared between original and copy; only the backing mapping is duplicated.
    """
    duplicated_data = copy.copy(trie._data)
    return Trie(trie.secured, trie.default, duplicated_data)
def trie_set(trie: Trie[K, V], key: K, value: V) -> None:
    """
    Store an item in a Merkle Trie.

    This method deletes the key if `value == trie.default`, because the
    Merkle Trie represents the default value by omitting it from the trie.
    """
    if value == trie.default:
        # Default values are represented by absence; drop the key if present.
        trie._data.pop(key, None)
    else:
        trie._data[key] = value
def trie_get(trie: Trie[K, V], key: K) -> V:
    """
    Get an item from the Merkle Trie.

    This method returns `trie.default` if the key is missing.
    """
    try:
        return trie._data[key]
    except KeyError:
        return trie.default
def common_prefix_length(a: Sequence, b: Sequence) -> int:
    """Find the longest common prefix of two sequences."""
    matched = 0
    for left, right in zip(a, b):
        if left != right:
            return matched
        matched += 1
    # One sequence is a prefix of the other (or they are equal).
    return matched
def nibble_list_to_compact(x: Bytes, is_leaf: bool) -> Bytes:
    """
    Compresses nibble-list into a standard byte array with a flag.

    A nibble-list is a list of byte values no greater than `15`. The flag is
    encoded in the high nibble of the highest byte. The flag nibble can be
    broken down into two two-bit flags.

    Highest nibble::

        +---+---+----------+--------+
        | _ | _ | is_leaf | parity |
        +---+---+----------+--------+
          3   2      1         0

    The lowest bit of the nibble encodes the parity of the length of the
    remaining nibbles -- `0` when even and `1` when odd. The second lowest
    bit distinguishes leaf and extension nodes. The other two bits are not
    used.
    """
    flag = 2 if is_leaf else 0
    packed = bytearray()
    if len(x) % 2:
        # Odd count: set the parity bit and fold the first nibble into
        # the flag byte.
        packed.append((flag + 1) * 16 + x[0])
        start = 1
    else:
        # Even count: the flag byte's low nibble stays zero as padding.
        packed.append(flag * 16)
        start = 0
    for i in range(start, len(x), 2):
        packed.append(x[i] * 16 + x[i + 1])
    return Bytes(packed)
def bytes_to_nibble_list(bytes_: Bytes) -> Bytes:
    """
    Convert a `Bytes` into to a sequence of nibbles (bytes with value < 16).
    """
    nibbles = bytearray()
    for byte in bytes_:
        # High nibble first, then low nibble.
        nibbles.append(byte >> 4)
        nibbles.append(byte & 0x0F)
    return Bytes(nibbles)
def _prepare_trie(
    trie: Trie[K, V],
    get_storage_root: Optional[Callable[[Bytes20], Bytes32]] = None,
) -> Mapping[Bytes, Bytes]:
    """
    Prepare the trie for root calculation. Removes values that are empty,
    hashes the keys (if `secured == True`) and encodes all the nodes.
    """
    mapped: MutableMapping[Bytes, Bytes] = {}
    for preimage, value in trie._data.items():
        if isinstance(value, FrontierAccount):
            # Accounts need their storage root to be encoded.
            assert get_storage_root is not None
            encoded_value = encode_node(value, get_storage_root(Bytes20(preimage)))
        else:
            encoded_value = encode_node(value)
        if encoded_value == b"":
            raise AssertionError

        # "secure" tries hash keys once before construction
        key: Bytes = keccak256(preimage) if trie.secured else preimage
        mapped[bytes_to_nibble_list(key)] = encoded_value
    return mapped
def root(
    trie: Trie[K, V],
    get_storage_root: Optional[Callable[[Bytes20], Bytes32]] = None,
) -> Bytes32:
    """
    Compute the root of a modified merkle patricia trie (MPT).

    Args:
        trie: The trie whose root hash to compute.
        get_storage_root: Callback used to resolve the storage root of an
            account, required when the trie stores accounts.

    Returns:
        The 32-byte root hash of the trie.
    """
    obj = _prepare_trie(trie, get_storage_root)

    root_node = encode_internal_node(patricialize(obj, Uint(0)))
    # Encode once and reuse; the previous version re-encoded the node on the
    # short-node path, doing the RLP serialization twice.
    encoded = rlp.encode(root_node)
    if len(encoded) < 32:
        # Short nodes are embedded, so the root is the hash of their RLP.
        return keccak256(encoded)
    assert isinstance(root_node, Bytes)
    return Bytes32(root_node)
def patricialize(obj: Mapping[Bytes, Bytes], level: Uint) -> Optional[InternalNode]:
    """
    Structural composition function.

    Used to recursively patricialize and merkleize a dictionary. Includes
    memoization of the tree structure and hashes.

    `obj` maps nibble-list keys to encoded values; `level` is the nibble
    depth already consumed by ancestors. Returns `None` for an empty map.
    """
    if len(obj) == 0:
        return None

    arbitrary_key = next(iter(obj))

    # if leaf node
    if len(obj) == 1:
        leaf = LeafNode(arbitrary_key[level:], obj[arbitrary_key])
        return leaf

    # prepare for extension node check by finding max j such that all keys in
    # obj have the same key[i:j]
    substring = arbitrary_key[level:]
    prefix_length = len(substring)
    for key in obj:
        prefix_length = min(prefix_length, common_prefix_length(substring, key[level:]))
        # finished searching, found another key at the current level
        if prefix_length == 0:
            break

    # if extension node
    if prefix_length > 0:
        prefix = arbitrary_key[int(level) : int(level) + prefix_length]
        return ExtensionNode(
            prefix,
            encode_internal_node(patricialize(obj, level + Uint(prefix_length))),
        )

    # Otherwise build a branch node: bucket keys by their nibble at `level`.
    branches: List[MutableMapping[Bytes, Bytes]] = []
    for _ in range(16):
        branches.append({})
    value = b""
    for key in obj:
        if len(key) == level:
            # shouldn't ever have an account or receipt in an internal node
            if isinstance(obj[key], (FrontierAccount, Uint)):
                raise AssertionError
            value = obj[key]
        else:
            branches[key[level]][key] = obj[key]

    # Recurse into each of the 16 buckets one nibble deeper.
    subnodes = tuple(
        encode_internal_node(patricialize(branches[k], level + Uint(1))) for k in range(16)
    )
    return BranchNode(
        cast(BranchSubnodes, assert_type(subnodes, Tuple[Extended, ...])),
        value,
    )
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/phase_manager.py | src/ethereum_test_types/phase_manager.py | """Test phase management for Ethereum tests."""
from contextlib import contextmanager
from enum import Enum
from typing import ClassVar, Iterator, Optional
class TestPhase(Enum):
    """Test phase for state and blockchain tests."""

    # Preparatory transactions/blocks (e.g. deployments, funding).
    SETUP = "setup"
    # The transactions/blocks actually under test.
    EXECUTION = "execution"
class TestPhaseManager:
    """
    Manages test phases for transactions and blocks.

    This singleton class provides context managers for "setup" and
    "execution" phases. Transactions automatically detect and tag
    themselves with the current phase.

    Usage:
        with TestPhaseManager.setup():
            # Transactions created here have test_phase = "setup"
            setup_tx = Transaction(...)

        with TestPhaseManager.execution():
            # Transactions created here have test_phase = "execution"
            benchmark_tx = Transaction(...)
    """

    _current_phase: ClassVar[Optional[TestPhase]] = None

    @classmethod
    @contextmanager
    def _activate(cls, phase: TestPhase) -> Iterator[None]:
        """
        Temporarily set the current phase, restoring the previous value on
        exit (even on exception). Shared by `setup` and `execution`, which
        previously duplicated this save/set/restore logic.
        """
        previous = cls._current_phase
        cls._current_phase = phase
        try:
            yield
        finally:
            cls._current_phase = previous

    @classmethod
    @contextmanager
    def setup(cls) -> Iterator[None]:
        """Context manager for the setup phase of a benchmark test."""
        with cls._activate(TestPhase.SETUP):
            yield

    @classmethod
    @contextmanager
    def execution(cls) -> Iterator[None]:
        """Context manager for the execution phase of a test."""
        with cls._activate(TestPhase.EXECUTION):
            yield

    @classmethod
    def get_current_phase(cls) -> Optional[TestPhase]:
        """Get the current test phase (`None` outside any phase context)."""
        return cls._current_phase

    @classmethod
    def reset(cls) -> None:
        """Reset the phase state to None (primarily for testing)."""
        cls._current_phase = None
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/receipt_types.py | src/ethereum_test_types/receipt_types.py | """Transaction receipt and log types for Ethereum tests."""
from typing import List
from pydantic import Field
from ethereum_test_base_types import (
Address,
Bloom,
Bytes,
CamelModel,
Hash,
HexNumber,
)
class TransactionLog(CamelModel):
    """Transaction log."""

    # NOTE(review): field set appears to mirror the JSON-RPC log object
    # (camelCase aliasing via CamelModel) — confirm against consumers.
    address: Address
    topics: List[Hash]
    data: Bytes
    block_number: HexNumber
    transaction_hash: Hash
    transaction_index: HexNumber
    block_hash: Hash
    log_index: HexNumber
    removed: bool
class ReceiptDelegation(CamelModel):
    """Transaction receipt set-code delegation."""

    # Serialized as "from" in JSON; aliased because `from` is a Python keyword.
    from_address: Address = Field(..., alias="from")
    nonce: HexNumber
    target: Address
class TransactionReceipt(CamelModel):
    """Transaction receipt."""

    # NOTE(review): every field is optional, presumably so expectations can
    # assert only a subset of the receipt — confirm with callers.
    transaction_hash: Hash | None = None
    gas_used: HexNumber | None = None
    root: Bytes | None = None
    status: HexNumber | None = None
    cumulative_gas_used: HexNumber | None = None
    logs_bloom: Bloom | None = None
    logs: List[TransactionLog] | None = None
    contract_address: Address | None = None
    effective_gas_price: HexNumber | None = None
    block_hash: Hash | None = None
    transaction_index: HexNumber | None = None
    blob_gas_used: HexNumber | None = None
    blob_gas_price: HexNumber | None = None
    delegations: List[ReceiptDelegation] | None = None
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/expectations.py | src/ethereum_test_types/block_access_list/expectations.py | """
Block Access List expectation classes for test validation.
This module contains classes for defining and validating expected
BAL values in tests.
"""
from typing import Any, Callable, ClassVar, Dict, List, Optional
from pydantic import Field, PrivateAttr
from ethereum_test_base_types import Address, CamelModel, StorageKey
from .account_absent_values import BalAccountAbsentValues
from .account_changes import (
BalAccountChange,
BalBalanceChange,
BalCodeChange,
BalNonceChange,
BalStorageSlot,
BlockAccessListChangeLists,
)
from .exceptions import BlockAccessListValidationError
from .t8n import BlockAccessList
class BalAccountExpectation(CamelModel):
    """
    Represents expected changes to a specific account in a block.

    Same as BalAccountChange but without the address field,
    used for expectations.
    """

    # Reject unknown keys so typos in test definitions fail loudly.
    model_config = CamelModel.model_config | {"extra": "forbid"}

    nonce_changes: List[BalNonceChange] = Field(
        default_factory=list, description="List of expected nonce changes"
    )
    balance_changes: List[BalBalanceChange] = Field(
        default_factory=list, description="List of expected balance changes"
    )
    code_changes: List[BalCodeChange] = Field(
        default_factory=list, description="List of expected code changes"
    )
    storage_changes: List[BalStorageSlot] = Field(
        default_factory=list, description="List of expected storage changes"
    )
    storage_reads: List[StorageKey] = Field(
        default_factory=list, description="List of expected read storage slots"
    )
    absent_values: Optional[BalAccountAbsentValues] = Field(
        default=None, description="Explicit absent value expectations using BalAccountAbsentValues"
    )

    # Cached singleton returned by `empty()`; consumers compare against it
    # by identity (`expectation is BalAccountExpectation.empty()`).
    _EMPTY: ClassVar[Optional["BalAccountExpectation"]] = None

    @classmethod
    def empty(cls) -> "BalAccountExpectation":
        """
        Create an expectation that validates the account has NO changes.

        This is distinct from `BalAccountExpectation()` with no fields set,
        which is ambiguous and clashes with `model_fields_set` logic, and
        will raise a clarifying error if used in expectations.

        Returns:
            A BalAccountExpectation instance with all change lists empty.
            This uses a classvar to facilitate identity checks across
            multiple expectation instances.
        """
        if cls._EMPTY is None:
            # Passing every field explicitly populates `model_fields_set`,
            # distinguishing this from a bare `BalAccountExpectation()`.
            cls._EMPTY = cls(
                nonce_changes=[],
                balance_changes=[],
                code_changes=[],
                storage_changes=[],
                storage_reads=[],
            )
        return cls._EMPTY
def compose(
    *modifiers: Callable[["BlockAccessList"], "BlockAccessList"],
) -> Callable[["BlockAccessList"], "BlockAccessList"]:
    """Compose multiple modifiers into a single modifier."""

    def composed(bal: BlockAccessList) -> BlockAccessList:
        # Thread the BAL through each modifier, left to right.
        current = bal
        for step in modifiers:
            current = step(current)
        return current

    return composed
class BlockAccessListExpectation(CamelModel):
    """
    Block Access List expectation model for test writing.

    This model is used to define expected BAL values in tests. It supports:
    - Partial validation (only checks explicitly set fields)
    - Convenient test syntax with named parameters
    - Verification against actual BAL from t8n
    - Explicit exclusion of addresses (using None values)

    Example:
        # In test definition
        expected_block_access_list = BlockAccessListExpectation(
            account_expectations={
                alice: BalAccountExpectation(
                    nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)]
                ),
                bob: None,  # Bob should NOT be in the BAL
            }
        )
    """

    model_config = CamelModel.model_config | {"extra": "forbid"}

    # A `None` value for an address asserts that the address must be
    # absent from the actual BAL.
    account_expectations: Dict[Address, BalAccountExpectation | None] = Field(
        default_factory=dict, description="Expected account changes or exclusions to verify"
    )

    # Optional transform applied to the t8n BAL when generating fixtures for
    # invalid-block test cases; set via `modify()`.
    _modifier: Callable[["BlockAccessList"], "BlockAccessList"] | None = PrivateAttr(default=None)

    def modify(
        self, *modifiers: Callable[["BlockAccessList"], "BlockAccessList"]
    ) -> "BlockAccessListExpectation":
        """
        Create a new expectation with a modifier for invalid test cases.

        Args:
            modifiers: One or more functions that take and return a
                BlockAccessList

        Returns:
            A new BlockAccessListExpectation instance with the modifiers
            applied

        Example:
            from ethereum_test_types.block_access_list.modifiers import (
                remove_nonces,
            )

            expectation = BlockAccessListExpectation(
                account_expectations={...}
            ).modify(remove_nonces(alice))
        """
        new_instance = self.model_copy(deep=True)
        new_instance._modifier = compose(*modifiers)
        return new_instance

    def modify_if_invalid_test(self, t8n_bal: "BlockAccessList") -> "BlockAccessList":
        """
        Apply the modifier to the given BAL if this is an invalid test case.

        Args:
            t8n_bal: The BlockAccessList from t8n tool

        Returns:
            The potentially transformed BlockAccessList for the fixture
        """
        if self._modifier:
            return self._modifier(t8n_bal)
        return t8n_bal

    def verify_against(self, actual_bal: "BlockAccessList") -> None:
        """
        Verify that the actual BAL from the client matches this expected BAL.

        Validation steps:
        1. Validate actual BAL conforms to EIP-7928 ordering requirements
        2. Verify address expectations - presence or explicit absence
        3. Verify expected changes within accounts match actual changes

        Args:
            actual_bal: The BlockAccessList model from the client

        Raises:
            BlockAccessListValidationError: If verification fails
        """
        # validate the actual BAL structure follows EIP-7928 ordering
        self._validate_bal_ordering(actual_bal)

        actual_accounts_by_addr = {acc.address: acc for acc in actual_bal.root}

        for address, expectation in self.account_expectations.items():
            if expectation is None:
                # check explicit exclusion of address when set to `None`
                if address in actual_accounts_by_addr:
                    raise BlockAccessListValidationError(
                        f"Address {address} should not be in BAL but was found"
                    )
            elif not expectation.model_fields_set:
                # Disallow ambiguous BalAccountExpectation() with no fields set
                raise BlockAccessListValidationError(
                    f"Address {address}: BalAccountExpectation() with no fields set is "
                    f"ambiguous. Use BalAccountExpectation.empty() to validate no changes, "
                    f"or explicitly set the fields to validate "
                    f"(e.g., nonce_changes=[...])."
                )
            else:
                # check address is present and validate changes
                if address not in actual_accounts_by_addr:
                    raise BlockAccessListValidationError(
                        f"Expected address {address} not found in actual BAL"
                    )
                if expectation is BalAccountExpectation.empty():
                    # explicit check for "no changes" validation w/ .empty()
                    # (identity comparison against the cached singleton)
                    if actual_accounts_by_addr.get(address) != BalAccountChange(address=address):
                        raise BlockAccessListValidationError(
                            f"No account changes expected for {address} but found "
                            f"changes: {actual_accounts_by_addr[address]}"
                        )
                actual_account = actual_accounts_by_addr[address]
                try:
                    self._compare_account_expectations(expectation, actual_account)
                except AssertionError as e:
                    raise BlockAccessListValidationError(f"Account {address}: {str(e)}") from e

    @staticmethod
    def _validate_bal_ordering(bal: "BlockAccessList") -> None:
        """
        Validate BAL ordering follows EIP-7928 requirements.

        Checks: addresses ascending; tx indices ascending and unique within
        each change list and each storage slot; storage slots and storage
        reads ascending.

        Args:
            bal: The BlockAccessList to validate

        Raises:
            BlockAccessListValidationError: If ordering is invalid
        """
        # Check address ordering (ascending)
        for i in range(1, len(bal.root)):
            if bal.root[i - 1].address >= bal.root[i].address:
                raise BlockAccessListValidationError(
                    f"BAL addresses are not in lexicographic order: "
                    f"{bal.root[i - 1].address} >= {bal.root[i].address}"
                )

        # Check transaction index ordering and uniqueness within accounts
        for account in bal.root:
            changes_to_check: List[tuple[str, BlockAccessListChangeLists]] = [
                ("nonce_changes", account.nonce_changes),
                ("balance_changes", account.balance_changes),
                ("code_changes", account.code_changes),
            ]
            for field_name, change_list in changes_to_check:
                if not change_list:
                    continue
                tx_indices = [c.tx_index for c in change_list]
                # Check both ordering and duplicates
                if tx_indices != sorted(tx_indices):
                    raise BlockAccessListValidationError(
                        f"Transaction indices not in ascending order in {field_name} of account "
                        f"{account.address}. Got: {tx_indices}, Expected: {sorted(tx_indices)}"
                    )
                if len(tx_indices) != len(set(tx_indices)):
                    duplicates = sorted({idx for idx in tx_indices if tx_indices.count(idx) > 1})
                    raise BlockAccessListValidationError(
                        f"Duplicate transaction indices in {field_name} of account "
                        f"{account.address}. Duplicates: {duplicates}"
                    )

            # Check storage slot ordering
            for i in range(1, len(account.storage_changes)):
                if account.storage_changes[i - 1].slot >= account.storage_changes[i].slot:
                    raise BlockAccessListValidationError(
                        f"Storage slots not in ascending order in account "
                        f"{account.address}: {account.storage_changes[i - 1].slot} >= "
                        f"{account.storage_changes[i].slot}"
                    )

            # Check transaction index ordering and uniqueness within storage
            # slots
            for storage_slot in account.storage_changes:
                if not storage_slot.slot_changes:
                    continue
                tx_indices = [c.tx_index for c in storage_slot.slot_changes]
                # Check both ordering and duplicates
                if tx_indices != sorted(tx_indices):
                    raise BlockAccessListValidationError(
                        f"Transaction indices not in ascending order in storage slot "
                        f"{storage_slot.slot} of account {account.address}. "
                        f"Got: {tx_indices}, Expected: {sorted(tx_indices)}"
                    )
                if len(tx_indices) != len(set(tx_indices)):
                    duplicates = sorted({idx for idx in tx_indices if tx_indices.count(idx) > 1})
                    raise BlockAccessListValidationError(
                        f"Duplicate transaction indices in storage slot "
                        f"{storage_slot.slot} of account {account.address}. "
                        f"Duplicates: {duplicates}"
                    )

            # Check storage reads ordering
            for i in range(1, len(account.storage_reads)):
                if account.storage_reads[i - 1] >= account.storage_reads[i]:
                    raise BlockAccessListValidationError(
                        f"Storage reads not in ascending order in account "
                        f"{account.address}: {account.storage_reads[i - 1]} >= "
                        f"{account.storage_reads[i]}"
                    )

    @staticmethod
    def _compare_account_expectations(
        expected: BalAccountExpectation, actual: BalAccountChange
    ) -> None:
        """
        Compare expected and actual account changes using subsequence
        validation.

        Only fields explicitly set on `expected` are validated; each
        expected list must appear as an ordered subsequence of the actual
        list (actual may contain extra entries in between).

        Args:
            expected: The expected account changes
            actual: The actual account changes from the BAL

        Raises:
            AssertionError: If validation fails
        """
        # Check absence expectations first if defined
        if expected.absent_values is not None:
            expected.absent_values.validate_against(actual)

        # Validate expected changes using subsequence validation
        field_pairs: List[tuple[str, Any, Any]] = [
            ("nonce_changes", expected.nonce_changes, actual.nonce_changes),
            ("balance_changes", expected.balance_changes, actual.balance_changes),
            ("code_changes", expected.code_changes, actual.code_changes),
            ("storage_changes", expected.storage_changes, actual.storage_changes),
            ("storage_reads", expected.storage_reads, actual.storage_reads),
        ]
        for field_name, expected_list, actual_list in field_pairs:
            # Only validate fields that were explicitly set
            if field_name not in expected.model_fields_set:
                continue
            # Check if explicitly set to empty but actual has values
            if not expected_list and actual_list:
                raise BlockAccessListValidationError(
                    f"Expected {field_name} to be empty but found {actual_list}"
                )

            if field_name == "storage_reads":
                # storage_reads is a simple list of StorageKey
                actual_idx = 0
                for expected_read in expected_list:
                    found = False
                    while actual_idx < len(actual_list):
                        if actual_list[actual_idx] == expected_read:
                            found = True
                            actual_idx += 1
                            break
                        actual_idx += 1
                    if not found:
                        raise BlockAccessListValidationError(
                            f"Storage read {expected_read} not found or not in correct order. "
                            f"Actual reads: {actual_list}"
                        )
            elif field_name == "storage_changes":
                # storage_changes is a list of BalStorageSlot
                actual_idx = 0
                for expected_slot in expected_list:
                    found = False
                    while actual_idx < len(actual_list):
                        if actual_list[actual_idx].slot == expected_slot.slot:
                            # Found matching slot, now validate slot_changes
                            actual_slot_changes = actual_list[actual_idx].slot_changes
                            expected_slot_changes = expected_slot.slot_changes

                            if not expected_slot_changes:
                                # Empty expected means any
                                # slot_changes are acceptable
                                pass
                            else:
                                # Validate slot_changes as subsequence
                                slot_actual_idx = 0
                                for expected_change in expected_slot_changes:
                                    slot_found = False
                                    while slot_actual_idx < len(actual_slot_changes):
                                        actual_change = actual_slot_changes[slot_actual_idx]
                                        if (
                                            actual_change.tx_index == expected_change.tx_index
                                            and actual_change.post_value
                                            == expected_change.post_value
                                        ):
                                            slot_found = True
                                            slot_actual_idx += 1
                                            break
                                        slot_actual_idx += 1
                                    if not slot_found:
                                        raise BlockAccessListValidationError(
                                            f"Storage change {expected_change} not found "
                                            f"or not in correct order in slot "
                                            f"{expected_slot.slot}. "
                                            f"Actual slot changes: {actual_slot_changes}"
                                        )
                            found = True
                            actual_idx += 1
                            break
                        actual_idx += 1
                    if not found:
                        raise BlockAccessListValidationError(
                            f"Storage slot {expected_slot.slot} not found "
                            f"or not in correct order. Actual slots: "
                            f"{[s.slot for s in actual_list]}"
                        )
            else:
                # Handle nonce_changes, balance_changes, code_changes
                # Create tuples for comparison (ordering already validated)
                if field_name == "nonce_changes":
                    expected_tuples = [(c.tx_index, c.post_nonce) for c in expected_list]
                    actual_tuples = [(c.tx_index, c.post_nonce) for c in actual_list]
                    item_type = "nonce"
                elif field_name == "balance_changes":
                    expected_tuples = [(c.tx_index, int(c.post_balance)) for c in expected_list]
                    actual_tuples = [(c.tx_index, int(c.post_balance)) for c in actual_list]
                    item_type = "balance"
                elif field_name == "code_changes":
                    expected_tuples = [(c.tx_index, bytes(c.new_code)) for c in expected_list]
                    actual_tuples = [(c.tx_index, bytes(c.new_code)) for c in actual_list]
                    item_type = "code"
                else:
                    # sanity check
                    raise ValueError(f"Unexpected field type: {field_name}")

                # Check that expected forms a subsequence of actual
                actual_idx = 0
                for exp_tuple in expected_tuples:
                    found = False
                    while actual_idx < len(actual_tuples):
                        if actual_tuples[actual_idx] == exp_tuple:
                            found = True
                            actual_idx += 1
                            break
                        actual_idx += 1
                    if not found:
                        raise BlockAccessListValidationError(
                            f"{item_type.capitalize()} change {exp_tuple} not found "
                            f"or not in correct order. Actual changes: {actual_tuples}"
                        )
__all__ = [
"BalAccountExpectation",
"BlockAccessListExpectation",
"compose",
]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/account_changes.py | src/ethereum_test_types/block_access_list/account_changes.py | """
Account change classes for Block Access List.
This module contains the core data structures representing changes to accounts
in a block access list as defined in EIP-7928.
"""
from typing import ClassVar, List, Union
from pydantic import Field
from ethereum_test_base_types import (
Address,
Bytes,
CamelModel,
HexNumber,
RLPSerializable,
StorageKey,
)
class BalNonceChange(CamelModel, RLPSerializable):
    """Represents a nonce change in the block access list."""

    # Reject unknown keys so malformed fixtures fail loudly.
    model_config = CamelModel.model_config | {"extra": "forbid"}

    # NOTE(review): default tx index is 1 (not 0) — presumably the common
    # single-transaction test layout; confirm with callers.
    tx_index: HexNumber = Field(
        HexNumber(1),
        description="Transaction index where the change occurred",
    )
    post_nonce: HexNumber = Field(..., description="Nonce value after the transaction")

    # Field order used for RLP serialization.
    rlp_fields: ClassVar[List[str]] = ["tx_index", "post_nonce"]
class BalBalanceChange(CamelModel, RLPSerializable):
    """Represents a balance change in the block access list."""

    model_config = CamelModel.model_config | {"extra": "forbid"}

    tx_index: HexNumber = Field(
        HexNumber(1),
        description="Transaction index where the change occurred",
    )
    post_balance: HexNumber = Field(..., description="Balance after the transaction")

    # Field order used for RLP serialization.
    rlp_fields: ClassVar[List[str]] = ["tx_index", "post_balance"]
class BalCodeChange(CamelModel, RLPSerializable):
    """Represents a code change in the block access list."""

    model_config = CamelModel.model_config | {"extra": "forbid"}

    tx_index: HexNumber = Field(
        HexNumber(1),
        description="Transaction index where the change occurred",
    )
    new_code: Bytes = Field(..., description="New code bytes")

    # Field order used for RLP serialization.
    rlp_fields: ClassVar[List[str]] = ["tx_index", "new_code"]
class BalStorageChange(CamelModel, RLPSerializable):
    """Represents a change to a specific storage slot."""

    model_config = CamelModel.model_config | {"extra": "forbid"}

    tx_index: HexNumber = Field(
        HexNumber(1),
        description="Transaction index where the change occurred",
    )
    post_value: StorageKey = Field(..., description="Value after the transaction")

    # Field order used for RLP serialization.
    rlp_fields: ClassVar[List[str]] = ["tx_index", "post_value"]
class BalStorageSlot(CamelModel, RLPSerializable):
    """Represents all changes to a specific storage slot."""

    model_config = CamelModel.model_config | {"extra": "forbid"}

    slot: StorageKey = Field(..., description="Storage slot key")
    slot_changes: List[BalStorageChange] = Field(
        default_factory=list, description="List of changes to this slot"
    )

    # Field order used for RLP serialization.
    rlp_fields: ClassVar[List[str]] = ["slot", "slot_changes"]
class BalAccountChange(CamelModel, RLPSerializable):
    """Represents all changes to a specific account in a block."""

    model_config = CamelModel.model_config | {"extra": "forbid"}

    address: Address = Field(..., description="Account address")
    nonce_changes: List[BalNonceChange] = Field(
        default_factory=list, description="List of nonce changes"
    )
    balance_changes: List[BalBalanceChange] = Field(
        default_factory=list, description="List of balance changes"
    )
    code_changes: List[BalCodeChange] = Field(
        default_factory=list, description="List of code changes"
    )
    storage_changes: List[BalStorageSlot] = Field(
        default_factory=list, description="List of storage changes"
    )
    storage_reads: List[StorageKey] = Field(
        default_factory=list, description="List of storage slots that were read"
    )

    # RLP serialization order — intentionally differs from the attribute
    # declaration order above.
    rlp_fields: ClassVar[List[str]] = [
        "address",
        "storage_changes",
        "storage_reads",
        "balance_changes",
        "nonce_changes",
        "code_changes",
    ]
# Union of the per-account change-list types that carry a `tx_index`
# (used by ordering/uniqueness validation).
BlockAccessListChangeLists = Union[
    List[BalNonceChange],
    List[BalBalanceChange],
    List[BalCodeChange],
]
__all__ = [
"BalNonceChange",
"BalBalanceChange",
"BalCodeChange",
"BalStorageChange",
"BalStorageSlot",
"BalAccountChange",
"BlockAccessListChangeLists",
]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/exceptions.py | src/ethereum_test_types/block_access_list/exceptions.py | """Exceptions related to block access list validation."""
class BlockAccessListValidationError(Exception):
    """Custom exception for Block Access List validation errors."""
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/account_absent_values.py | src/ethereum_test_types/block_access_list/account_absent_values.py | """
BalAccountAbsentValues class for BAL testing.
This module provides a unified class for specifying explicit absent values
in Block Access Lists. This class uses the same change classes as
BalAccountChanges to specify specific values that should NOT exist in the BAL.
For checking complete absence, use BalAccountExpectation with empty lists
instead.
"""
from typing import Any, List
from pydantic import Field, model_validator
from ethereum_test_base_types import CamelModel, StorageKey
from .account_changes import (
BalAccountChange,
BalBalanceChange,
BalCodeChange,
BalNonceChange,
BalStorageSlot,
)
from .exceptions import BlockAccessListValidationError
# Shared suffix for validation errors that reject empty "absent value"
# lists: an empty list would ambiguously read as "check for any change".
EMPTY_LIST_ERROR_MSG = (
    "Empty lists are not allowed. This would mean 'check for any change' and "
    "is bad practice. Instead, use the `BalAccountExpectation` to define "
    "explicit, expected changes."
)
class BalAccountAbsentValues(CamelModel):
"""
Represents explicit absent value expectations for a specific account
in a block.
This class specifies specific changes that should NOT exist in the BAL
for a given account.
IMPORTANT: This class is for checking that specific values are absent,
NOT for checking that entire categories are empty. For complete
absence checks (e.g., "no nonce changes at all"), use
BalAccountExpectation with empty lists instead.
The validation works by checking that none of the specified
explicit changes exist in the actual BAL.
Example:
# Forbid specific nonce change at tx 1 with post_nonce=5, and specific
# storage change
absent_values = BalAccountAbsentValues(
nonce_changes=[
# Forbid exact nonce change at this tx
BalNonceChange(tx_index=1, post_nonce=5),
],
storage_changes=[
BalStorageSlot(
slot=0x42,
slot_changes=[
# Forbid exact storage change at this slot and tx
BalStorageChange(tx_index=2, post_value=0x99)
],
)
],
)
For checking complete absence:
# Use BalAccountExpectation with empty lists instead
expectation = BalAccountExpectation(
nonce_changes=[], # Expect NO nonce changes at all
storage_changes=[], # Expect NO storage changes at all
)
"""
model_config = CamelModel.model_config | {"extra": "forbid"}
nonce_changes: List[BalNonceChange] = Field(
default_factory=list,
description="List of nonce changes that should NOT exist in the BAL. "
"Validates that none of these changes are present.",
)
balance_changes: List[BalBalanceChange] = Field(
default_factory=list,
description="List of balance changes that should NOT exist in the BAL. "
"Validates that none of these changes are present.",
)
code_changes: List[BalCodeChange] = Field(
default_factory=list,
description="List of code changes that should NOT exist in the BAL. "
"Validates that none of these changes are present.",
)
storage_changes: List[BalStorageSlot] = Field(
default_factory=list,
description="List of storage slots/changes that should NOT exist in the BAL. "
"Validates that none of these changes are present.",
)
storage_reads: List[StorageKey] = Field(
default_factory=list,
description="List of storage slots that should NOT be read.",
)
@model_validator(mode="after")
def validate_specific_absences_only(self) -> "BalAccountAbsentValues":
"""Ensure absence fields contain specific values, not empty checks."""
# at least one field must have content
if not any(
[
self.nonce_changes,
self.balance_changes,
self.code_changes,
self.storage_changes,
self.storage_reads,
]
):
raise ValueError(
"At least one absence field must be specified. "
"`BalAccountAbsentValues` is for checking specific forbidden values. "
f"{EMPTY_LIST_ERROR_MSG}"
)
# check that no fields are explicitly set to empty lists
field_checks = [
("nonce_changes", self.nonce_changes),
("balance_changes", self.balance_changes),
("code_changes", self.code_changes),
("storage_changes", self.storage_changes),
("storage_reads", self.storage_reads),
]
for field_name, field_value in field_checks:
if field_name in self.model_fields_set and field_value == []:
raise ValueError(
f"`BalAccountAbsentValues.{field_name}` cannot be an empty list. "
f"{EMPTY_LIST_ERROR_MSG}"
)
# validate that storage_changes don't have empty slot_changes
for storage_slot in self.storage_changes:
if not storage_slot.slot_changes:
raise ValueError(
f"`BalAccountAbsentValues.storage_changes[{storage_slot.slot}].slot_changes` "
f"cannot be an empty list. {EMPTY_LIST_ERROR_MSG}"
)
return self
@staticmethod
def _validate_forbidden_changes(
actual_changes: List,
forbidden_changes: List,
match_fn: Any,
error_msg_fn: Any,
) -> None:
for actual in actual_changes:
for forbidden in forbidden_changes:
if match_fn(actual, forbidden):
raise BlockAccessListValidationError(error_msg_fn(actual))
def validate_against(self, account: BalAccountChange) -> None:
    """
    Assert that ``account`` contains none of the forbidden changes
    described by this object.

    Args:
        account: The BalAccountChange to validate against.

    Raises:
        BlockAccessListValidationError: If any forbidden change is found.
    """
    # Flat change categories share the same match/raise pattern.
    flat_checks = [
        (
            account.nonce_changes,
            self.nonce_changes,
            lambda a, f: a.tx_index == f.tx_index and a.post_nonce == f.post_nonce,
            lambda a: f"Unexpected nonce change found at tx {a.tx_index}",
        ),
        (
            account.balance_changes,
            self.balance_changes,
            lambda a, f: a.tx_index == f.tx_index and a.post_balance == f.post_balance,
            lambda a: f"Unexpected balance change found at tx {a.tx_index}",
        ),
        (
            account.code_changes,
            self.code_changes,
            lambda a, f: a.tx_index == f.tx_index and a.new_code == f.new_code,
            lambda a: f"Unexpected code change found at tx {a.tx_index}",
        ),
    ]
    for actual, forbidden, match_fn, msg_fn in flat_checks:
        self._validate_forbidden_changes(actual, forbidden, match_fn, msg_fn)
    # Storage changes are nested: match the slot first, then per-tx writes.
    for forbidden_slot in self.storage_changes:
        for actual_slot in account.storage_changes:
            if actual_slot.slot != forbidden_slot.slot:
                continue
            slot_id = actual_slot.slot
            self._validate_forbidden_changes(
                actual_slot.slot_changes,
                forbidden_slot.slot_changes,
                lambda a, f: (a.tx_index == f.tx_index and a.post_value == f.post_value),
                lambda a, slot=slot_id: (
                    f"Unexpected storage change found at slot {slot} in tx {a.tx_index}"
                ),
            )
    # Storage reads are plain keys; any exact match is forbidden.
    for forbidden_read in self.storage_reads:
        if forbidden_read in account.storage_reads:
            raise BlockAccessListValidationError(
                f"Unexpected storage read found at slot {forbidden_read}"
            )
# Public API of this module: only the absence-checking model is exported.
__all__ = [
    "BalAccountAbsentValues",
]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/modifiers.py | src/ethereum_test_types/block_access_list/modifiers.py | """
BAL modifier functions for invalid test cases.
This module provides modifier functions that can be used to modify Block Access
Lists in various ways for testing invalid block scenarios. They are composable
and can be combined to create complex modifications.
"""
from typing import Any, Callable, List, Optional
from ethereum_test_base_types import Address, HexNumber
from .. import BalCodeChange
from . import (
BalAccountChange,
BalBalanceChange,
BalNonceChange,
BalStorageChange,
BlockAccessList,
)
def _remove_field_from_accounts(
    addresses: tuple[Address, ...], field_name: str
) -> Callable[[BlockAccessList], BlockAccessList]:
    """
    Build a transform that clears ``field_name`` (sets it to ``[]``) on every
    account whose address is listed, leaving all other accounts untouched.

    The returned transform raises ``ValueError`` if any requested address is
    absent from the BAL, guarding tests against typos.
    """
    # Set-based membership: duplicates in `addresses` no longer cause the
    # original len()-based completeness check to misfire with an empty
    # `missing` set in the error message.
    wanted = set(addresses)

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # Track found addresses per invocation; a closure-level set would
        # leak state between calls and mask missing addresses on reuse.
        found: set = set()
        new_root = []
        for account_change in bal.root:
            if account_change.address in wanted:
                found.add(account_change.address)
                new_account = account_change.model_copy(deep=True)
                # Clear only the requested field; everything else is kept.
                setattr(new_account, field_name, [])
                new_root.append(new_account)
            else:
                new_root.append(account_change)
        missing = wanted - found
        if missing:
            # Sanity check that all specified addresses exist in the BAL.
            raise ValueError(f"Some specified addresses were not found in the BAL: {missing}")
        return BlockAccessList(root=new_root)

    return transform
def _modify_field_value(
    address: Address,
    tx_index: int,
    field_name: str,
    change_class: type,
    new_value: Any,
    value_field: str = "post_value",
    nested: bool = False,
    slot: Optional[int] = None,
) -> Callable[[BlockAccessList], BlockAccessList]:
    """
    Abstracted helper to modify a field value for a specific account and
    transaction.

    Builds a transform that replaces the change entry matching ``tx_index``
    (and, for nested storage, ``slot``) on the account with ``address`` by a
    freshly constructed ``change_class(tx_index=..., <value_field>=new_value)``.
    A missing tx/slot entry is silently ignored; only a missing *address* is
    treated as an error.
    """
    # NOTE(review): this flag lives in the closure, so it persists across
    # multiple invocations of the returned transform — a second call could
    # skip the missing-address check. Confirm transforms are single-use.
    found_address = False
    def transform(bal: BlockAccessList) -> BlockAccessList:
        nonlocal found_address
        new_root = []
        for account_change in bal.root:
            if account_change.address == address:
                found_address = True
                # Deep-copy so the caller's BAL is never mutated in place.
                new_account = account_change.model_copy(deep=True)
                changes = getattr(new_account, field_name)
                if changes:
                    if nested and slot is not None:
                        # nested structure (storage): locate the slot, then
                        # replace the change entry for the requested tx
                        for storage_slot in changes:
                            if storage_slot.slot == slot:
                                for j, change in enumerate(storage_slot.slot_changes):
                                    if change.tx_index == tx_index:
                                        kwargs = {"tx_index": tx_index, value_field: new_value}
                                        storage_slot.slot_changes[j] = change_class(**kwargs)
                                        break
                                break
                    else:
                        # flat structure (nonce, balance, code)
                        for i, change in enumerate(changes):
                            if change.tx_index == tx_index:
                                kwargs = {"tx_index": tx_index, value_field: new_value}
                                changes[i] = change_class(**kwargs)
                                break
                new_root.append(new_account)
            else:
                new_root.append(account_change)
        if not found_address:
            # sanity check that we actually found the address
            raise ValueError(f"Address {address} not found in BAL to modify {field_name}")
        return BlockAccessList(root=new_root)
    return transform
def remove_accounts(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """Drop every listed account entry from the BAL entirely."""

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # Keep only accounts whose address was not listed for removal.
        kept = [acct for acct in bal.root if acct.address not in addresses]
        return BlockAccessList(root=kept)

    return transform
def remove_nonces(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """Clear the ``nonce_changes`` field of each listed account."""
    field = "nonce_changes"
    return _remove_field_from_accounts(addresses, field)
def remove_balances(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """Clear the ``balance_changes`` field of each listed account."""
    field = "balance_changes"
    return _remove_field_from_accounts(addresses, field)
def remove_storage(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """Clear the ``storage_changes`` field of each listed account."""
    field = "storage_changes"
    return _remove_field_from_accounts(addresses, field)
def remove_storage_reads(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """Clear the ``storage_reads`` field of each listed account."""
    field = "storage_reads"
    return _remove_field_from_accounts(addresses, field)
def remove_code(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """Clear the ``code_changes`` field of each listed account."""
    field = "code_changes"
    return _remove_field_from_accounts(addresses, field)
def modify_nonce(
    address: Address, tx_index: int, nonce: int
) -> Callable[[BlockAccessList], BlockAccessList]:
    """Overwrite the recorded post-nonce for one account/transaction pair."""
    return _modify_field_value(
        address,
        tx_index,
        "nonce_changes",
        BalNonceChange,
        nonce,
        value_field="post_nonce",
    )
def modify_balance(
    address: Address, tx_index: int, balance: int
) -> Callable[[BlockAccessList], BlockAccessList]:
    """Overwrite the recorded post-balance for one account/transaction pair."""
    return _modify_field_value(
        address,
        tx_index,
        "balance_changes",
        BalBalanceChange,
        balance,
        value_field="post_balance",
    )
def modify_storage(
    address: Address, tx_index: int, slot: int, value: int
) -> Callable[[BlockAccessList], BlockAccessList]:
    """
    Overwrite the recorded post-value of one storage slot for a specific
    account and transaction.
    """
    return _modify_field_value(
        address,
        tx_index,
        "storage_changes",
        BalStorageChange,
        value,
        value_field="post_value",
        nested=True,
        slot=slot,
    )
def modify_code(
    address: Address, tx_index: int, code: bytes
) -> Callable[[BlockAccessList], BlockAccessList]:
    """Overwrite the recorded deployed code for one account/transaction pair."""
    return _modify_field_value(
        address,
        tx_index,
        "code_changes",
        BalCodeChange,
        code,
        value_field="post_code",
    )
def swap_tx_indices(tx1: int, tx2: int) -> Callable[[BlockAccessList], BlockAccessList]:
    """
    Swap transaction indices throughout the BAL, modifying tx ordering.

    Every change entry carrying ``tx1`` is relabelled ``tx2`` and vice versa,
    across nonce, balance, storage, and code changes. ``storage_reads`` carry
    no tx_index and are left untouched.
    """
    # Bookkeeping of which indices were actually seen, per change category.
    # NOTE(review): these dicts are updated but never consulted afterwards;
    # presumably intended for a sanity check — confirm before relying on them.
    nonce_indices = {tx1: False, tx2: False}
    balance_indices = nonce_indices.copy()
    storage_indices = nonce_indices.copy()
    code_indices = nonce_indices.copy()

    def transform(bal: BlockAccessList) -> BlockAccessList:
        nonlocal nonce_indices, balance_indices, storage_indices, code_indices
        new_root = []
        for account_change in bal.root:
            new_account = account_change.model_copy(deep=True)
            # Swap in nonce changes
            if new_account.nonce_changes:
                for nonce_change in new_account.nonce_changes:
                    if nonce_change.tx_index == tx1:
                        nonce_indices[tx1] = True
                        nonce_change.tx_index = HexNumber(tx2)
                    elif nonce_change.tx_index == tx2:
                        nonce_indices[tx2] = True
                        nonce_change.tx_index = HexNumber(tx1)
            # Swap in balance changes
            if new_account.balance_changes:
                for balance_change in new_account.balance_changes:
                    if balance_change.tx_index == tx1:
                        balance_indices[tx1] = True
                        balance_change.tx_index = HexNumber(tx2)
                    elif balance_change.tx_index == tx2:
                        balance_indices[tx2] = True
                        balance_change.tx_index = HexNumber(tx1)
            # Swap in storage changes (nested structure)
            if new_account.storage_changes:
                for storage_slot in new_account.storage_changes:
                    for storage_change in storage_slot.slot_changes:
                        if storage_change.tx_index == tx1:
                            # Fix: record in storage_indices — the original
                            # updated balance_indices here (copy-paste error),
                            # leaving storage_indices永 never written.
                            storage_indices[tx1] = True
                            storage_change.tx_index = HexNumber(tx2)
                        elif storage_change.tx_index == tx2:
                            storage_indices[tx2] = True
                            storage_change.tx_index = HexNumber(tx1)
            # Note: storage_reads is just a list of StorageKey, no tx_index to
            # swap
            # Swap in code changes
            if new_account.code_changes:
                for code_change in new_account.code_changes:
                    if code_change.tx_index == tx1:
                        code_indices[tx1] = True
                        code_change.tx_index = HexNumber(tx2)
                    elif code_change.tx_index == tx2:
                        code_indices[tx2] = True
                        code_change.tx_index = HexNumber(tx1)
            new_root.append(new_account)
        return BlockAccessList(root=new_root)

    return transform
def append_account(
    account_change: BalAccountChange,
) -> Callable[[BlockAccessList], BlockAccessList]:
    """Append an extra account entry at the end of the BAL."""

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # Copy the existing entries and tack the new account onto the end.
        return BlockAccessList(root=[*bal.root, account_change])

    return transform
def duplicate_account(address: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """
    Duplicate an account entry in the BAL (the copy is inserted immediately
    after the original, producing an invalid duplicate-address list).

    The returned transform raises ``ValueError`` if ``address`` is absent
    from the BAL.
    """

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # Track presence per invocation; the original closure-level flag
        # persisted across calls and could mask a missing address on reuse.
        address_present = False
        new_root = []
        for account_change in bal.root:
            new_root.append(account_change)
            if account_change.address == address:
                # Add duplicate immediately after
                new_root.append(account_change.model_copy(deep=True))
                address_present = True
        if not address_present:
            # sanity check that we actually duplicate
            raise ValueError(f"Address {address} not found in BAL to duplicate")
        return BlockAccessList(root=new_root)

    return transform
def reverse_accounts() -> Callable[[BlockAccessList], BlockAccessList]:
    """Reverse the order of accounts in the BAL."""

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # Slice with a negative step yields a fresh reversed list.
        return BlockAccessList(root=bal.root[::-1])

    return transform
def sort_accounts_by_address() -> Callable[[BlockAccessList], BlockAccessList]:
    """Sort accounts by address (may modify expected ordering)."""

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # Stable sort on the address attribute preserves ties.
        ordered = sorted(bal.root, key=lambda acct: acct.address)
        return BlockAccessList(root=ordered)

    return transform
def reorder_accounts(indices: List[int]) -> Callable[[BlockAccessList], BlockAccessList]:
    """Reorder accounts according to the provided index list."""

    def transform(bal: BlockAccessList) -> BlockAccessList:
        # The permutation must cover every account exactly once by length.
        if len(indices) != len(bal.root):
            raise ValueError("Index list length must match number of accounts")
        permuted = [bal.root[position] for position in indices]
        return BlockAccessList(root=permuted)

    return transform
def clear_all() -> Callable[[BlockAccessList], BlockAccessList]:
    """Return an empty BAL."""

    def transform(bal: BlockAccessList) -> BlockAccessList:
        del bal  # the input is deliberately discarded
        return BlockAccessList(root=[])

    return transform
def keep_only(*addresses: Address) -> Callable[[BlockAccessList], BlockAccessList]:
    """
    Keep only the specified accounts, removing all others.

    The returned transform raises ``ValueError`` if any requested address is
    not present in the BAL.
    """
    wanted = set(addresses)

    def transform(bal: BlockAccessList) -> BlockAccessList:
        new_root = [acct for acct in bal.root if acct.address in wanted]
        # Set-based completeness check: the original compared list lengths,
        # which misfires when the BAL (or the argument list) contains
        # duplicates and never reported *which* addresses were missing.
        missing = wanted - {acct.address for acct in new_root}
        if missing:
            raise ValueError(f"Some specified addresses were not found in the BAL: {missing}")
        return BlockAccessList(root=new_root)

    return transform
# Public API of the modifiers module. Every public modifier defined above is
# listed here so that `from ... import *` and API docs stay complete.
__all__ = [
    # Account-level modifiers
    "remove_accounts",
    "append_account",
    "duplicate_account",
    "reverse_accounts",
    "sort_accounts_by_address",  # was missing from the public API
    "reorder_accounts",  # was missing from the public API
    "clear_all",  # was missing from the public API
    "keep_only",
    # Field-level modifiers
    "remove_nonces",
    "remove_balances",
    "remove_storage",
    "remove_storage_reads",
    "remove_code",
    # Value modifiers
    "modify_nonce",
    "modify_balance",
    "modify_storage",
    "modify_code",
    # Transaction index modifiers
    "swap_tx_indices",
]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/t8n.py | src/ethereum_test_types/block_access_list/t8n.py | """Block Access List (BAL) for t8n tool communication and fixtures."""
from functools import cached_property
from typing import Any, List
import ethereum_rlp as eth_rlp
from pydantic import Field
from ethereum_test_base_types import Bytes, EthereumTestRootModel
from ethereum_test_base_types.serialization import to_serializable_element
from .account_changes import BalAccountChange
class BlockAccessList(EthereumTestRootModel[List[BalAccountChange]]):
    """
    Block Access List for t8n tool communication and fixtures.

    This is a root model: the object *is* the list of per-account changes,
    exactly as laid out in EIP-7928, rather than a container wrapping one.

    Used for:
    - Communication with t8n tools
    - Fixture generation
    - RLP encoding for hash verification

    Example:
        bal = BlockAccessList([
            BalAccountChange(address=alice, nonce_changes=[...]),
            BalAccountChange(address=bob, balance_changes=[...])
        ])
    """

    root: List[BalAccountChange] = Field(default_factory=list)

    def to_list(self) -> List[Any]:
        """Return the nested plain-list form used for RLP encoding (EIP-7928)."""
        return to_serializable_element(self.root)

    @cached_property
    def rlp(self) -> Bytes:
        """RLP encoding of the access list, cached for hash verification."""
        encoded = eth_rlp.encode(self.to_list())
        return Bytes(encoded)

    @cached_property
    def rlp_hash(self) -> Bytes:
        """Keccak-256 hash of the RLP encoding, cached."""
        return self.rlp.keccak256()
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/block_access_list/__init__.py | src/ethereum_test_types/block_access_list/__init__.py | """
Block Access List (BAL) models for EIP-7928.
Following the established pattern in the codebase (AccessList,
AuthorizationTuple), these are simple data classes that can be composed
together.
"""
from .account_absent_values import BalAccountAbsentValues
from .account_changes import (
BalAccountChange,
BalBalanceChange,
BalCodeChange,
BalNonceChange,
BalStorageChange,
BalStorageSlot,
BlockAccessListChangeLists,
)
from .exceptions import BlockAccessListValidationError
from .expectations import (
BalAccountExpectation,
BlockAccessListExpectation,
compose,
)
from .t8n import BlockAccessList
# Names re-exported as the public surface of the block_access_list package.
__all__ = [
    # Core models
    "BlockAccessList",
    "BlockAccessListExpectation",
    "BalAccountExpectation",
    "BalAccountAbsentValues",
    # Change types
    "BalAccountChange",
    "BalNonceChange",
    "BalBalanceChange",
    "BalCodeChange",
    "BalStorageChange",
    "BalStorageSlot",
    # Utilities
    "BlockAccessListChangeLists",
    "BlockAccessListValidationError",
    "compose",
]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_transactions.py | src/ethereum_test_types/tests/test_transactions.py | """Test suite for transaction signing and serialization."""
from typing import Tuple
import pytest
from ethereum_test_base_types import AccessList, Hash
from ..transaction_types import Transaction
@pytest.mark.parametrize(
[
"tx",
"expected_signature",
"expected_sender",
"expected_serialized",
],
[
(
Transaction(
ty=0,
nonce=0,
gas_price=1000000000,
protected=False,
),
(
27,
53278292994103027856810056625464356790495244130915206464977063215688423053889,
51913880459033617104276213638042305667358907564476883662899087493955291953870,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0xf86380843b9aca008252089400000000000000000000000000000000000000aa80801ba075ca71"
"f8b7f1e95841db86704f4fe3da864694d135e0ed12ddf936f009541a41a072c6370f0c078df435b4"
"041fe9e1fd596f7bcbd810993122b39a7f212617bace",
),
(
Transaction(
ty=0,
nonce=0,
gas_price=1000000000,
protected=False,
to=None,
),
(
27,
69580953802627422387708984158392304322597795331978871908970340300185024633230,
9987437858655471264845875982426404737641514329900923987672173898100072610198,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0xf84f80843b9aca008252088080801ba099d56c9a276e7b5c29f433bdc5ee0d551a242542445d2f"
"f793be942cd4cc998ea01614b083596de05d65f22e0319d969a8465732ce5ad199c41c17fd72a651"
"7996",
),
(
Transaction(
ty=0,
nonce=0,
gas_price=1000000000,
protected=True,
),
(
37,
43493668498277122407922969255529421324465897185389682326746699251814478581534,
22805784714726510606244238945786421174106485654201651664508077741484361436093,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0xf86380843b9aca008252089400000000000000000000000000000000000000aa808025a060288b"
"4319025f4955e36c53831871a91b2b59131b0355dbbc01a34f05b30f1ea0326b9de159e61d79e55c"
"1844a8b0de520eef2fcb8b2992750c2f694d841ccbbd",
),
(
Transaction(
ty=1,
nonce=0,
gas_price=1000000000,
),
(
1,
64717097837956073364449107040675652683171442339602810813343912669101132492723,
16766093433587703483635506527630997640109434240457156669715758246025787266781,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x01f8650180843b9aca008252089400000000000000000000000000000000000000aa8080c001a0"
"8f14944d8d46e2b6280d61afee759646d42aa23189e0764ed409e68f45962fb3a0251145c8de5edc"
"9a19b3244f37caca6858aec3a1056330e251491881cbd2d6dd",
),
(
Transaction(
ty=1,
nonce=0,
gas_price=1000000000,
access_list=[],
),
(
1,
64717097837956073364449107040675652683171442339602810813343912669101132492723,
16766093433587703483635506527630997640109434240457156669715758246025787266781,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x01f8650180843b9aca008252089400000000000000000000000000000000000000aa8080c001a0"
"8f14944d8d46e2b6280d61afee759646d42aa23189e0764ed409e68f45962fb3a0251145c8de5edc"
"9a19b3244f37caca6858aec3a1056330e251491881cbd2d6dd",
),
(
Transaction(
ty=1,
nonce=0,
gas_price=1000000000,
access_list=[
AccessList(
address="0x0000000000000000000000000000000000000123",
storage_keys=[0x456, 0x789],
)
],
),
(
0,
66978004263684299215005885298552000328779940885769675563360335351527355325681,
56105983548446712608196400571580400910290560012106232439738315212890613615554,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x01f8c10180843b9aca008252089400000000000000000000000000000000000000aa8080f85bf8"
"59940000000000000000000000000000000000000123f842a0000000000000000000000000000000"
"0000000000000000000000000000000456a000000000000000000000000000000000000000000000"
"0000000000000000078980a0941434fdc19a5853453cad120ebdea00bc0fce323301794b908ca9f7"
"a0661cf1a07c0adc80aec2b076a8dbfde04e0a51de29e9e904510f804cd57e153a804e0bc2",
),
(
Transaction(
ty=1,
nonce=0,
gas_price=1000000000,
to=None,
access_list=[AccessList(address=0x123, storage_keys=[0x456, 0x789])],
),
(
0,
16814800520830332761874524721118962980778925570205706327283408113434790879234,
38982159227826105391951884315531363239837729091757253660549724931098838198780,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x01f8ad0180843b9aca00825208808080f85bf85994000000000000000000000000000000000000"
"0123f842a00000000000000000000000000000000000000000000000000000000000000456a00000"
"00000000000000000000000000000000000000000000000000000000078980a0252cd6ff24fb485a"
"50aa3cc4e11947e257c325213a6d5c6ae2ea70cb68b26002a0562f1ec7bfd17a0cc72ae25192b8a7"
"450b315a4a8bcea8f60281d3e72bd669fc",
),
(
Transaction(
ty=2,
nonce=0,
access_list=[AccessList(address=0x123, storage_keys=[0x456, 0x789])],
max_fee_per_gas=10,
max_priority_fee_per_gas=5,
),
(
0,
91749892362404225540206401600149574009569116775084797886968775355264509620768,
44616954018220623825844796436003012227293665710878821792267483622343477105629,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x02f8be0180050a8252089400000000000000000000000000000000000000aa8080f85bf8599400"
"00000000000000000000000000000000000123f842a0000000000000000000000000000000000000"
"0000000000000000000000000456a000000000000000000000000000000000000000000000000000"
"0000000000078980a0cad8994ac160fd7e167715bbe20212939abdd5cd5a1f6c4dd6e5612cd8b332"
"20a062a44d12b176bbd669d09d20d26281b5a693d8a52ab02a9d130201ee5db113dd",
),
(
Transaction(
ty=2,
nonce=0,
to=None,
access_list=[AccessList(address=0x123, storage_keys=[0x456, 0x789])],
max_fee_per_gas=10,
max_priority_fee_per_gas=5,
),
(
0,
90322080068302816931882206183311797596224841408506356995778410737685074239457,
11150681916082931632476906514672946504836769153730288987778622018872414351162,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x02f8aa0180050a825208808080f85bf859940000000000000000000000000000000000000123f8"
"42a00000000000000000000000000000000000000000000000000000000000000456a00000000000"
"00000000000000000000000000000000000000000000000000078980a0c7b07c5552829e585f68e2"
"eed4495ed6dfbe8cb1453edb2dc1e959d1087f5fe1a018a70ff379958b47e85172bc93fe5e47dc23"
"d13e3b0e4a800f1f3a0766a0af3a",
),
(
Transaction(
ty=3,
nonce=0,
access_list=[AccessList(address=0x123, storage_keys=[0x456, 0x789])],
max_fee_per_gas=10,
max_priority_fee_per_gas=5,
max_fee_per_blob_gas=100,
blob_versioned_hashes=[],
),
(
0,
48031212734270141632897997738964470162703155533103542626635301519303700733477,
25274846027382763458393508666208718022841865508839207374090140639125166603463,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x03f8c00180050a8252089400000000000000000000000000000000000000aa8080f85bf8599400"
"00000000000000000000000000000000000123f842a0000000000000000000000000000000000000"
"0000000000000000000000000456a000000000000000000000000000000000000000000000000000"
"0000000000078964c080a06a30b3f8fd434b55ee40d662263ffa98ff9c31ca0f9bce61ca5de5019c"
"4d5e25a037e10e4f6ca934236d6bf064134f7c3203b7308a16d5c43b3c9ce8b8a6fbbcc7",
),
(
Transaction(
ty=3,
nonce=0,
access_list=[AccessList(address=0x123, storage_keys=[0x456, 0x789])],
max_fee_per_gas=10,
max_priority_fee_per_gas=5,
max_fee_per_blob_gas=100,
blob_versioned_hashes=[Hash(0), Hash(0x01)],
),
(
1,
16459258601065735918558202846976552354069849089672096317954578689965269615539,
13812345945591193204859005420918043741474532833353814142223502482030426489098,
),
"0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"0x03f901030180050a8252089400000000000000000000000000000000000000aa8080f85bf85994"
"0000000000000000000000000000000000000123f842a00000000000000000000000000000000000"
"000000000000000000000000000456a0000000000000000000000000000000000000000000000000"
"000000000000078964f842a000000000000000000000000000000000000000000000000000000000"
"00000000a0000000000000000000000000000000000000000000000000000000000000000101a024"
"639c3863663bb71a82b48482fd92428a4e1e6962c8ebe467a72adf2dc283b3a01e8982c15e3b5b53"
"e90ae56d2a6e93ebd918d778ff0cf7f4f8f96eb2f472810a",
),
],
ids=[
"type-0-not-protected",
"type-0-protected-contract-creation",
"type-0-protected",
"type-1",
"type-1-access-list-empty",
"type-1-access-list-filled",
"type-1-access-list-filled-contract-creation",
"type-2",
"type-2-contract-creation",
"type-3-minimal-empty-blobs",
"type-3-minimal-two-blobs",
],
)
def test_transaction_signing(
    tx: Transaction,
    expected_signature: Tuple[int, int, int],
    expected_sender: str,
    expected_serialized: str,
) -> None:
    """Test that transaction signing / serialization works as expected."""
    # Returns a new Transaction with v/r/s and the recovered sender set.
    tx = tx.with_signature_and_sender()
    signature = (tx.v, tx.r, tx.s)
    # NOTE(review): a freshly built 3-tuple is never None, so this assert is
    # vacuous; the equality check on the next line is the meaningful one.
    assert signature is not None
    assert signature == expected_signature
    assert tx.sender is not None
    assert tx.sender.hex() == expected_sender
    assert (tx.rlp().hex()) == expected_serialized
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_post_alloc.py | src/ethereum_test_types/tests/test_post_alloc.py | """Test suite for test spec submodules of the `ethereum_test` module."""
from typing import Type
import pytest
from ethereum_test_base_types import Account
from ethereum_test_types import Alloc
@pytest.fixture()
def post(request: pytest.FixtureRequest) -> Alloc:
    """
    Post state: Set from the test's indirectly parametrized `post` parameter.
    """
    raw_state = request.param
    return Alloc.model_validate(raw_state)
@pytest.fixture()
def alloc(request: pytest.FixtureRequest) -> Alloc:
    """
    Alloc state: Set from the test's indirectly parametrized `alloc` parameter.
    """
    raw_state = request.param
    return Alloc.model_validate(raw_state)
@pytest.mark.parametrize(
["post", "alloc", "expected_exception_type"],
[
# Account should not exist but contained in alloc
(
{"0x0000000000000000000000000000000000000000": Account.NONEXISTENT},
{
"0x0000000000000000000000000000000000000000": {
"nonce": "1",
"code": "0x123",
"balance": "1",
"storage": {0: 1},
}
},
Alloc.UnexpectedAccountError,
),
# Account should not exist but contained in alloc
(
{"0x0000000000000000000000000000000000000000": Account.NONEXISTENT},
{"0x0000000000000000000000000000000000000000": {"nonce": "1"}},
Alloc.UnexpectedAccountError,
),
# Account should not exist but contained in alloc
(
{"0x0000000000000000000000000000000000000001": Account.NONEXISTENT},
{"0x0000000000000000000000000000000000000001": {"balance": "1"}},
Alloc.UnexpectedAccountError,
),
# Account should not exist but contained in alloc
(
{"0x000000000000000000000000000000000000000a": Account.NONEXISTENT},
{"0x000000000000000000000000000000000000000A": {"code": "0x00"}},
Alloc.UnexpectedAccountError,
),
# Account should exist but not in alloc
(
{"0x000000000000000000000000000000000000000A": Account()},
{
"0x000000000000000000000000000000000000000B": {
"nonce": "1",
"code": "0x123",
"balance": "1",
"storage": {0: 1},
}
},
Alloc.MissingAccountError,
),
# Account should exist and contained in alloc, but don't care about
# values
(
{"0x0000000000000000000000000000000000000001": Account()},
{
"0x0000000000000000000000000000000000000001": {
"nonce": "1",
"code": "0x123",
"balance": "1",
"storage": {0: 1},
}
},
None,
),
# Account should exist and contained in alloc, single incorrect value
(
{"0x0000000000000000000000000000000000000001": Account(nonce=0)},
{
"0x0000000000000000000000000000000000000001": {
"nonce": "1",
"code": "0x123",
"balance": "1",
"storage": {0: 1},
}
},
Account.NonceMismatchError,
),
],
indirect=["post", "alloc"],
)
def test_verify_post_alloc(
    post: Alloc, alloc: Alloc, expected_exception_type: Type[Exception] | None
) -> None:
    """Test `verify_post_alloc` method of `Alloc`."""
    if expected_exception_type is None:
        # No exception expected: verification must pass cleanly.
        post.verify_post_alloc(alloc)
    else:
        # Only the exception type matters; the unused `as _` capture from the
        # original was dropped.
        with pytest.raises(expected_exception_type):
            post.verify_post_alloc(alloc)
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_blob_types.py | src/ethereum_test_types/tests/test_blob_types.py | """Test suite for blobs."""
import copy
import time
from typing import Any
import pytest
from filelock import FileLock
from ethereum_test_forks import (
Cancun,
Osaka,
Prague,
)
from ethereum_test_forks.forks.transition import (
CancunToPragueAtTime15k,
PragueToOsakaAtTime15k,
ShanghaiToCancunAtTime15k,
)
from ..blob_types import CACHED_BLOBS_DIRECTORY, Blob, clear_blob_cache
def increment_counter(timeout: float = 10) -> int:
    """
    Atomically increment the shared blob-unit-test counter file, creating it
    on first use, and return the new value.

    The counter exists so that 'test_transition_fork_blobs' can wait until
    every other blob unit test (each of which increments this counter) has
    finished, without adding a test-ordering dependency.  The hardcoded
    target inside that test must be bumped whenever new json-blob-creating
    unit tests are added.
    """
    counter_file = CACHED_BLOBS_DIRECTORY / "blob_unit_test_counter.txt"
    lock_path = counter_file.with_suffix(".lock")
    with FileLock(lock_path, timeout=timeout):
        # A missing file means the counter starts at zero.
        previous = int(counter_file.read_text().strip()) if counter_file.exists() else 0
        updated = previous + 1
        counter_file.write_text(str(updated))
        return updated
def wait_until_counter_reached(target: int, poll_interval: float = 0.1) -> int:
    """
    Poll the shared blob-unit-test counter file until it reaches ``target``.

    Fails the test immediately if the counter overshoots ``target``, which
    indicates a stale counter file from a previous run.

    Args:
        target: Counter value to wait for.
        poll_interval: Seconds to sleep between polls.

    Returns:
        The counter value (== target) once reached.
    """
    file_path = CACHED_BLOBS_DIRECTORY / "blob_unit_test_counter.txt"
    lock_file = file_path.with_suffix(".lock")
    while True:
        # Take the lock for each read so a half-written file is never seen.
        with FileLock(lock_file, timeout=10):
            current_value = 0
            if file_path.exists():
                try:
                    current_value = int(file_path.read_text().strip())
                except ValueError:
                    # Unparseable content is treated as "not written yet".
                    # Narrowed from `except Exception`, which silently
                    # swallowed I/O errors and could loop forever on them.
                    current_value = 0
            if current_value == target:
                return current_value
            if current_value > target:
                pytest.fail(
                    f"The blob_unit_test lock counter is too high! "
                    f"Expected {target}, but got {current_value}. "
                    f"It probably reused an existing file that was not cleared. "
                    f"Delete {file_path} manually to fix this."
                )
        time.sleep(poll_interval)
@pytest.mark.parametrize("seed", [0, 10, 100])
@pytest.mark.parametrize("fork", [Cancun, Prague, Osaka])
def test_blob_creation_and_writing_and_reading(
    seed: int,
    fork: Any,
) -> None:  # noqa: F811
    """
    Generates blobs for different forks and ensures writing to file and reading
    from file works as expected.
    """
    blob = Blob.from_fork(fork=fork, seed=seed, timestamp=100)
    blob.write_to_file()
    # Reconstruct the cache file name this seed/fork combination produces.
    cell_proof_amount = fork.get_blob_constant("AMOUNT_CELL_PROOFS")
    file_name = f"blob_{seed}_cell_proofs_{cell_proof_amount}.json"
    # Round-trip: the blob read back must equal the blob written out.
    restored = Blob.from_file(file_name)
    assert blob.model_dump() == restored.model_dump()
    increment_counter()
@pytest.mark.parametrize(
    "corruption_mode",
    [
        Blob.ProofCorruptionMode.CORRUPT_ALL_BYTES,
        Blob.ProofCorruptionMode.CORRUPT_FIRST_BYTE,
        Blob.ProofCorruptionMode.CORRUPT_LAST_BYTE,
        Blob.ProofCorruptionMode.CORRUPT_TO_ALL_ZEROES,
    ],
)
@pytest.mark.parametrize("fork", [Cancun, Prague, Osaka])
def test_blob_proof_corruption(
    corruption_mode: Any,
    fork: Any,
) -> None:
    """
    Generates blobs for different forks, corrupts their proofs and ensures that
    the corrupted proof is not equal to the correct proof.
    """
    blob = Blob.from_fork(fork=fork, timestamp=100)
    # Deep copy: corrupt_proof may mutate the proof container in place.
    pristine_proof = copy.deepcopy(blob.proof)
    blob.corrupt_proof(corruption_mode)
    assert blob.proof != pristine_proof, (
        f"Proof corruption mode {corruption_mode} for fork {fork.name()} failed, "
        "proof is unchanged!"
    )
    increment_counter()
@pytest.mark.parametrize("timestamp", [14999, 15000])
@pytest.mark.parametrize(
    "fork", [ShanghaiToCancunAtTime15k, CancunToPragueAtTime15k, PragueToOsakaAtTime15k]
)
def test_transition_fork_blobs(
    fork: Any,
    timestamp: int,
) -> None:
    """
    Generates blobs for transition forks (time 14999 is old fork, time 15000 is
    new fork).
    """
    # line below guarantees that this test runs only after the other blob unit
    # tests are done
    # NOTE: 21 must equal the total increment_counter() calls made by the
    # other blob tests in this file (3 seeds x 3 forks + 4 modes x 3 forks);
    # update it whenever such tests are added or removed.
    wait_until_counter_reached(21)
    clear_blob_cache(CACHED_BLOBS_DIRECTORY)
    print(f"Original fork: {fork}, Timestamp: {timestamp}")
    pre_transition_fork = fork.transitions_from()
    # used only by the timestamp == 15000 assertion below
    post_transition_fork_at_15k = fork.transitions_to()
    if not pre_transition_fork.supports_blobs() and timestamp < 15000:
        print(
            f"Skipping blob creation because pre-transition fork is {pre_transition_fork} "
            f"and timestamp is {timestamp}"
        )
        return
    # b has already applied transition if requirements were met
    b = Blob.from_fork(fork=fork, timestamp=timestamp)
    print(f"Fork of created blob: {b.fork.name()}")
    if timestamp == 14999:  # case: no transition yet
        assert b.fork.name() == pre_transition_fork.name(), (
            f"Transition fork failure! Fork {fork.name()} at timestamp: {timestamp} should have "
            f"stayed at fork {pre_transition_fork.name()} but has unexpectedly transitioned "
            f"to {b.fork.name()}"
        )
    elif timestamp == 15000:  # case: transition to next fork has happened
        assert b.fork.name() == post_transition_fork_at_15k.name(), (
            f"Transition fork failure! Fork {fork.name()} at timestamp: {timestamp} should have "
            f"transitioned to {post_transition_fork_at_15k.name()} but is still at {b.fork.name()}"
        )
    # delete counter at last iteration (otherwise re-running all unit tests
    # will fail)
    if timestamp == 15_000 and pre_transition_fork == Prague:
        (CACHED_BLOBS_DIRECTORY / "blob_unit_test_counter.txt").unlink()
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_types.py | src/ethereum_test_types/tests/test_types.py | """Test suite for `ethereum_test` module."""
from typing import Any, Dict, List
import pytest
from ethereum_test_base_types import (
AccessList,
Account,
Address,
Bytes,
Storage,
TestPrivateKey,
ZeroPaddedHexNumber,
to_json,
)
from ethereum_test_base_types.pydantic import CopyValidateModel
from ..account_types import EOA, Alloc
from ..block_types import (
Environment,
Withdrawal,
)
from ..transaction_types import (
AuthorizationTuple,
Transaction,
)
def test_storage() -> None:
    """
    Test `ethereum_test.types.storage` parsing.

    Exercises key/value coercion ("0x"-prefixed strings parse as hex, other
    strings as decimal), membership, deletion, negative-key wrapping to
    2**256-complement, JSON serialization, and the `store_next` counter.
    """
    # "0x10" parses as hex 16; "10" (no prefix) parses as decimal 10.
    s = Storage({"10": "0x10"})  # type: ignore[dict-item]
    assert 10 in s
    assert s[10] == 16
    s = Storage({"10": "10"})  # type: ignore[dict-item]
    assert 10 in s
    assert s[10] == 10
    s = Storage({10: 10})  # type: ignore[dict-item]
    assert 10 in s
    assert s[10] == 10
    # Iteration yields the (normalized) integer keys in insertion order.
    iter_s = iter(Storage({10: 20, "11": "21"}))  # type: ignore[dict-item]
    assert next(iter_s) == 10
    assert next(iter_s) == 11
    # Assignment goes through the same coercion as construction.
    s["10"] = "0x10"
    s["0x10"] = "10"
    assert s[10] == 16
    assert s[16] == 10
    # Membership accepts decimal strings, hex strings and ints alike.
    assert "10" in s
    assert "0xa" in s
    assert 10 in s
    del s[10]
    assert "10" not in s
    assert "0xa" not in s
    assert 10 not in s
    # Negative keys/values wrap to their 2**256 two's-complement form.
    s = Storage({-1: -1, -2: -2})  # type: ignore[dict-item]
    assert s[-1] == 2**256 - 1
    assert s[-2] == 2**256 - 2
    d = to_json(s)
    assert (
        d["0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"]
        == "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
    )
    assert (
        d["0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"]
        == "0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"
    )
    # Try to add a duplicate key (negative and positive number at the same
    # time)
    # same value, ok
    s[2**256 - 1] = 2**256 - 1
    to_json(s)
    # Check store counter: store_next assigns consecutive slots 0, 1, 2, ...
    s = Storage({})
    s.store_next(0x100)
    s.store_next("0x200")
    s.store_next(b"\x03\x00".rjust(32, b"\x00"))
    d = to_json(s)
    assert d == {
        "0x00": ("0x0100"),
        "0x01": ("0x0200"),
        "0x02": ("0x0300"),
    }
@pytest.mark.parametrize(
    ["account"],
    [
        pytest.param(
            Account(),
            id="no_fields",
        ),
        pytest.param(
            Account(
                nonce=0,
            ),
            id="zero_nonce",
        ),
        pytest.param(
            Account(
                balance=0,
            ),
            id="zero_balance",
        ),
        pytest.param(
            Account(
                code="",
            ),
            id="empty_code",
        ),
        pytest.param(
            Account(
                storage={},
            ),
            id="empty_storage",
        ),
        pytest.param(
            Account(
                nonce=0,
                balance=0,
                code="",
                storage={
                    1: 0,
                },
            ),
            id="only_zero_storage_values",
        ),
    ],
)
def test_empty_accounts(account: Account) -> None:
    """
    Test `ethereum_test.types.account` parsing.

    Every parametrized account above consists solely of unset/zero/empty
    fields, so each must evaluate as falsy (i.e. count as "empty").
    """
    assert not bool(account)
@pytest.mark.parametrize(
    ["account", "alloc_dict", "should_pass"],
    [
        # All None: Pass
        (
            Account(),
            {"nonce": "1", "code": "0x123", "balance": "1", "storage": {0: 1}},
            True,
        ),
        # Storage must be empty: Fail
        (
            Account(storage={}),
            {"nonce": "1", "code": "0x123", "balance": "1", "storage": {0: 1}},
            False,
        ),
        # Storage must be empty: Pass
        (
            Account(storage={}),
            {"nonce": "1", "code": "0x123", "balance": "1", "storage": {}},
            True,
        ),
        # Storage must be empty: Pass
        (
            Account(storage={}),
            {
                "nonce": "1",
                "code": "0x123",
                "balance": "1",
                "storage": {0: 0, 1: 0},
            },
            True,
        ),
        # Storage must be empty: Pass
        (
            Account(storage={0: 0}),
            {
                "nonce": "1",
                "code": "0x123",
                "balance": "1",
                "storage": {},
            },
            True,
        ),
        # Storage must not be empty: Pass
        (
            Account(storage={1: 1}),
            {
                "nonce": "1",
                "code": "0x123",
                "balance": "1",
                "storage": {0: 0, 1: 1},
            },
            True,
        ),
        # Storage must not be empty: Fail
        (
            Account(storage={1: 1}),
            {
                "nonce": "1",
                "code": "0x123",
                "balance": "1",
                "storage": {0: 0, 1: 1, 2: 2},
            },
            False,
        ),
        # Code must be empty: Fail
        (
            Account(code=""),
            {
                "nonce": "0",
                "code": "0x123",
                "balance": "0",
                "storage": {},
            },
            False,
        ),
        # Code must be empty: Pass
        (
            Account(code=""),
            {
                "nonce": "1",
                "code": "0x",
                "balance": "1",
                "storage": {0: 0, 1: 1},
            },
            True,
        ),
        # Nonce must be empty: Fail
        (
            Account(nonce=0),
            {
                "nonce": "1",
                "code": "0x",
                "balance": "0",
                "storage": {},
            },
            False,
        ),
        # Nonce must be empty: Pass
        (
            Account(nonce=0),
            {
                "nonce": "0",
                "code": "0x1234",
                "balance": "1",
                "storage": {0: 0, 1: 1},
            },
            True,
        ),
        # Nonce must not be empty: Fail
        (
            Account(nonce=1),
            {
                "code": "0x1234",
                "balance": "1",
                "storage": {0: 0, 1: 1},
            },
            False,
        ),
        # Nonce must not be empty: Pass
        (
            Account(nonce=1),
            {
                "nonce": "1",
                "code": "0x",
                "balance": "0",
                "storage": {},
            },
            True,
        ),
        # Balance must be empty: Fail
        (
            Account(balance=0),
            {
                "nonce": "0",
                "code": "0x",
                "balance": "1",
                "storage": {},
            },
            False,
        ),
        # Balance must be empty: Pass
        (
            Account(balance=0),
            {
                "nonce": "1",
                "code": "0x1234",
                "balance": "0",
                "storage": {0: 0, 1: 1},
            },
            True,
        ),
        # Balance must not be empty: Fail
        (
            Account(balance=1),
            {
                "nonce": "1",
                "code": "0x1234",
                "storage": {0: 0, 1: 1},
            },
            False,
        ),
        # Balance must not be empty: Pass
        (
            Account(balance=1),
            {
                "nonce": "0",
                "code": "0x",
                "balance": "1",
                "storage": {},
            },
            True,
        ),
    ],
)
def test_account_check_alloc(
    account: Account, alloc_dict: Dict[Any, Any], should_pass: bool
) -> None:
    """
    Test `Account.check_alloc` method.

    Each case compares an expectation `account` (only set fields are checked)
    against an actual allocation; `should_pass` states whether the check must
    succeed or raise.
    """
    alloc_account = Account(**alloc_dict)
    if should_pass:
        # Must not raise when the allocation matches the expectation.
        account.check_alloc(Address(1), alloc_account)
    else:
        # Must raise on a mismatch; the exact exception type is not pinned.
        with pytest.raises(Exception) as _:
            account.check_alloc(Address(1), alloc_account)
@pytest.mark.parametrize(
    ["alloc_1", "alloc_2", "expected_alloc"],
    [
        pytest.param(
            Alloc(),
            Alloc(),
            Alloc(),
            id="empty_alloc",
        ),
        pytest.param(
            Alloc({0x1: {"nonce": 1}}),  # type: ignore
            Alloc({0x2: {"nonce": 2}}),  # type: ignore
            Alloc({0x1: Account(nonce=1), 0x2: Account(nonce=2)}),  # type: ignore
            id="alloc_different_accounts",
        ),
        pytest.param(
            Alloc({0x2: {"nonce": 1}}),  # type: ignore
            Alloc({"0x0000000000000000000000000000000000000002": {"nonce": 2}}),  # type: ignore
            Alloc({0x2: Account(nonce=2)}),  # type: ignore
            id="overwrite_account",
        ),
        pytest.param(
            Alloc({0x2: {"balance": 1}}),  # type: ignore
            Alloc({"0x0000000000000000000000000000000000000002": {"nonce": 1}}),  # type: ignore
            Alloc({0x2: Account(balance=1, nonce=1)}),  # type: ignore
            id="mix_account",
        ),
    ],
)
def test_alloc_append(alloc_1: Alloc, alloc_2: Alloc, expected_alloc: Alloc) -> None:
    """
    Test `ethereum_test.types.alloc` merging.

    Covers disjoint accounts, int vs hex-string address keys resolving to the
    same account (later alloc wins per field), and field-level merging.
    """
    assert Alloc.merge(alloc_1, alloc_2) == expected_alloc
@pytest.mark.parametrize(
    ["account_1", "account_2", "expected_account"],
    [
        pytest.param(
            Account(),
            Account(),
            Account(),
            id="empty_accounts",
        ),
        pytest.param(
            None,
            None,
            Account(),
            id="none_accounts",
        ),
        pytest.param(
            Account(nonce=1),
            Account(code="0x6000"),
            Account(nonce=1, code="0x6000"),
            id="accounts_with_different_fields",
        ),
        pytest.param(
            Account(nonce=1),
            Account(nonce=2),
            Account(nonce=2),
            id="accounts_with_different_nonce",
        ),
    ],
)
def test_account_merge(
    account_1: Account | None, account_2: Account | None, expected_account: Account
) -> None:
    """
    Test `ethereum_test.types.account` merging.

    Disjoint fields are combined; on conflict the second account's field wins;
    two `None` inputs merge to an empty `Account`.
    """
    assert Account.merge(account_1, account_2) == expected_account
# EIP-55 mixed-case (checksummed) address used by the serialization cases below.
CHECKSUM_ADDRESS = "0x8a0A19589531694250d570040a0c4B74576919B8"
@pytest.mark.parametrize(
    ["can_be_deserialized", "model_instance", "json"],
    [
        pytest.param(
            True,
            Address(CHECKSUM_ADDRESS),
            CHECKSUM_ADDRESS,
            # xfail: Address serialization does not preserve the EIP-55
            # mixed-case checksum form.
            marks=pytest.mark.xfail,
            id="address_with_checksum_address",
        ),
        pytest.param(
            True,
            Account(),
            {
                "nonce": "0x00",
                "balance": "0x00",
                "code": "0x",
                "storage": {},
            },
            id="account_1",
        ),
        pytest.param(
            True,
            Account(
                nonce=1,
                balance=2,
                code="0x1234",
                storage={
                    0: 0,
                    1: 1,
                },
            ),
            {
                "nonce": "0x01",
                "balance": "0x02",
                "code": "0x1234",
                "storage": {
                    "0x00": "0x00",
                    "0x01": "0x01",
                },
            },
            id="account_2",
        ),
        pytest.param(
            True,
            Withdrawal(index=0, validator_index=1, address=0x1234, amount=2),
            {
                "index": "0x0",
                "validatorIndex": "0x1",
                "address": "0x0000000000000000000000000000000000001234",
                "amount": "0x2",
            },
            id="withdrawal",
        ),
        pytest.param(
            True,
            Environment(),
            {
                "currentCoinbase": "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
                "currentGasLimit": str(ZeroPaddedHexNumber(Environment().gas_limit)),
                "currentNumber": "0x01",
                "currentTimestamp": "0x03e8",
                "blockHashes": {},
                "ommers": [],
                "parentUncleHash": (
                    "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"
                ),
            },
            id="environment_1",
        ),
        pytest.param(
            True,
            Environment(
                fee_recipient=0x1234,
                difficulty=0x5,
                prev_randao=0x6,
                base_fee_per_gas=0x7,
                parent_difficulty=0x8,
                parent_timestamp=0x9,
                parent_base_fee_per_gas=0xA,
                parent_gas_used=0xB,
                parent_gas_limit=0xC,
                parent_ommers_hash=0xD,
                withdrawals=[Withdrawal(index=0, validator_index=1, address=0x1234, amount=2)],
                parent_blob_gas_used=0xE,
                parent_excess_blob_gas=0xF,
                blob_gas_used=0x10,
                excess_blob_gas=0x11,
                block_hashes={1: 2, 3: 4},
            ),
            {
                "currentCoinbase": "0x0000000000000000000000000000000000001234",
                "currentGasLimit": str(ZeroPaddedHexNumber(Environment().gas_limit)),
                "currentNumber": "0x01",
                "currentTimestamp": "0x03e8",
                "currentDifficulty": "0x05",
                "currentRandom": "0x06",
                "currentBaseFee": "0x07",
                "parentDifficulty": "0x08",
                "parentTimestamp": "0x09",
                "parentBaseFee": "0x0a",
                "parentGasUsed": "0x0b",
                "parentGasLimit": "0x0c",
                "parentUncleHash": (
                    "0x000000000000000000000000000000000000000000000000000000000000000d"
                ),
                "withdrawals": [
                    {
                        "index": "0x0",
                        "validatorIndex": "0x1",
                        "address": "0x0000000000000000000000000000000000001234",
                        "amount": "0x2",
                    },
                ],
                "parentBlobGasUsed": "0x0e",
                "parentExcessBlobGas": "0x0f",
                "currentBlobGasUsed": "0x10",
                "currentExcessBlobGas": "0x11",
                "blockHashes": {
                    "0x01": "0x0000000000000000000000000000000000000000000000000000000000000002",
                    "0x03": "0x0000000000000000000000000000000000000000000000000000000000000004",
                },
                "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000004",
                "ommers": [],
            },
            id="environment_2",
        ),
        pytest.param(
            True,
            Transaction().with_signature_and_sender(),
            {
                "type": "0x0",
                "chainId": "0x1",
                "nonce": "0x0",
                "to": "0x00000000000000000000000000000000000000aa",
                "value": "0x0",
                "input": "0x",
                "gas": "0x5208",
                "gasPrice": "0xa",
                "v": "0x26",
                "r": "0xcc61d852649c34cc0b71803115f38036ace257d2914f087bf885e6806a664fbd",
                "s": "0x2020cb35f5d7731ab540d62614503a7f2344301a86342f67daf011c1341551ff",
                "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
            },
            id="transaction_t8n_default_args",
        ),
        pytest.param(
            True,
            Transaction(
                to=None,
            ).with_signature_and_sender(),
            {
                "type": "0x0",
                "chainId": "0x1",
                "nonce": "0x0",
                "to": None,
                "value": "0x0",
                "input": "0x",
                "gas": "0x5208",
                "gasPrice": "0xa",
                "v": "0x25",
                "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce",
                "s": "0xcbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da",
                "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
            },
            id="transaction_t8n_to_none",
        ),
        pytest.param(
            True,
            Transaction(
                to="",
            ).with_signature_and_sender(),
            {
                "type": "0x0",
                "chainId": "0x1",
                "nonce": "0x0",
                "to": None,
                "value": "0x0",
                "input": "0x",
                "gas": "0x5208",
                "gasPrice": "0xa",
                "v": "0x25",
                "r": "0x1cfe2cbb0c3577f74d9ae192a7f1ee2d670fe806a040f427af9cb768be3d07ce",
                "s": "0xcbe2d029f52dbf93ade486625bed0603945d2c7358b31de99fe8786c00f13da",
                "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
            },
            id="transaction_t8n_to_empty_str",
        ),
        pytest.param(
            True,
            Transaction(
                to=0x1234,
                data=b"\x01\x00",
                access_list=[
                    AccessList(
                        address=0x1234,
                        storage_keys=[0, 1],
                    )
                ],
                max_priority_fee_per_gas=10,
                max_fee_per_gas=20,
                max_fee_per_blob_gas=30,
                blob_versioned_hashes=[0, 1],
            ).with_signature_and_sender(),
            {
                "type": "0x3",
                "chainId": "0x1",
                "nonce": "0x0",
                "to": "0x0000000000000000000000000000000000001234",
                "accessList": [
                    {
                        "address": "0x0000000000000000000000000000000000001234",
                        "storageKeys": [
                            "0x0000000000000000000000000000000000000000000000000000000000000000",
                            "0x0000000000000000000000000000000000000000000000000000000000000001",
                        ],
                    }
                ],
                "value": "0x0",
                "input": "0x0100",
                "gas": "0x5208",
                "maxPriorityFeePerGas": "0xa",
                "maxFeePerGas": "0x14",
                "maxFeePerBlobGas": "0x1e",
                "blobVersionedHashes": [
                    "0x0000000000000000000000000000000000000000000000000000000000000000",
                    "0x0000000000000000000000000000000000000000000000000000000000000001",
                ],
                "v": "0x0",
                "r": "0x418bb557c43262375f80556cb09dac5e67396acf0eaaf2c2540523d1ce54b280",
                "s": "0x4fa36090ea68a1138043d943ced123c0b0807d82ff3342a6977cbc09230e927c",
                "sender": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
            },
            id="transaction_3",
        ),
    ],
)
class TestPydanticModelConversion:
    """Test that Pydantic models are converted to and from JSON correctly."""

    def test_json_serialization(
        self, can_be_deserialized: bool, model_instance: Any, json: str | Dict[str, Any]
    ) -> None:
        """Test that to_json returns the expected JSON for the given object."""
        # Serialization is checked for every case, even those that cannot be
        # deserialized back, so the flag is intentionally unused here.
        del can_be_deserialized
        assert to_json(model_instance) == json

    def test_json_deserialization(
        self, can_be_deserialized: bool, model_instance: Any, json: str | Dict[str, Any]
    ) -> None:
        """Test that constructing the model from JSON restores the instance."""
        if not can_be_deserialized:
            pytest.skip(reason="The model instance in this case can not be deserialized")
        # Round-trip: rebuilding from the JSON dict must equal the original.
        model_type = type(model_instance)
        assert model_type(**json) == model_instance
@pytest.mark.parametrize(
    ["invalid_tx_args", "expected_exception", "expected_exception_substring"],
    [
        pytest.param(
            {"gas_price": 1, "max_fee_per_gas": 2},
            Transaction.InvalidFeePaymentError,
            "only one type of fee payment field can be used",
            id="gas-price-and-max-fee-per-gas",
        ),
        pytest.param(
            {"gas_price": 1, "max_priority_fee_per_gas": 2},
            Transaction.InvalidFeePaymentError,
            "only one type of fee payment field can be used",
            id="gas-price-and-max-priority-fee-per-gas",
        ),
        pytest.param(
            {"gas_price": 1, "max_fee_per_blob_gas": 2},
            Transaction.InvalidFeePaymentError,
            "only one type of fee payment field can be used",
            id="gas-price-and-max-fee-per-blob-gas",
        ),
        pytest.param(
            {"ty": 0, "v": 1, "secret_key": 2},
            Transaction.InvalidSignaturePrivateKeyError,
            "can't define both 'signature' and 'private_key'",
            id="type0-signature-and-secret-key",
        ),
    ],
)
def test_transaction_post_init_invalid_arg_combinations(  # noqa: D103
    invalid_tx_args: Any, expected_exception: Any, expected_exception_substring: str
) -> None:
    """
    Test that Transaction.__post_init__ raises the expected exceptions for
    invalid constructor argument combinations.
    """
    # Both the exception type and a fragment of its message are verified.
    with pytest.raises(expected_exception) as exc_info:
        Transaction(**invalid_tx_args)
    assert expected_exception_substring in str(exc_info.value)
@pytest.mark.parametrize(
    ["tx_args", "expected_attributes_and_values"],
    [
        pytest.param(
            {"max_fee_per_blob_gas": 10},
            [
                ("ty", 3),
            ],
            id="max_fee_per_blob_gas-adds-ty-3",
        ),
        pytest.param(
            {},
            [
                ("gas_price", 10),
            ],
            id="no-fees-adds-gas_price",
        ),
        pytest.param(
            {},
            [
                ("secret_key", TestPrivateKey),
            ],
            id="no-signature-adds-secret_key",
        ),
        pytest.param(
            {"max_fee_per_gas": 10},
            [
                ("ty", 2),
            ],
            id="max_fee_per_gas-adds-ty-2",
        ),
        pytest.param(
            {"access_list": [AccessList(address=0x1234, storage_keys=[0, 1])]},
            [
                ("ty", 1),
            ],
            id="access_list-adds-ty-1",
        ),
        pytest.param(
            {"ty": 1},
            [
                ("access_list", []),
            ],
            id="ty-1-adds-empty-access_list",
        ),
        pytest.param(
            {"ty": 2},
            [
                ("max_priority_fee_per_gas", 0),
            ],
            id="ty-2-adds-max_priority_fee_per_gas",
        ),
        pytest.param(
            {"to": Address(1)},
            [
                ("to", Address(1)),
            ],
            id="non-zero-to",
        ),
        pytest.param(
            {"to": Address(0)},
            [
                ("to", Address(0)),
            ],
            id="zero-to",
        ),
    ],
)
def test_transaction_post_init_defaults(tx_args: Any, expected_attributes_and_values: Any) -> None:
    """
    Test that Transaction.__post_init__ sets the expected default values for
    missing fields (e.g. transaction type inferred from the fee fields given,
    implicit gas price, default secret key, empty access list).
    """
    tx = Transaction(**tx_args)
    # Each expected (attribute, value) pair must be present on the instance.
    for attr, val in expected_attributes_and_values:
        assert hasattr(tx, attr)
        assert getattr(tx, attr) == val
@pytest.mark.parametrize(
    ["withdrawals", "expected_root"],
    [
        pytest.param(
            [],
            bytes.fromhex("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"),
            id="empty-withdrawals",
        ),
        pytest.param(
            [
                Withdrawal(
                    index=0,
                    validator_index=1,
                    address=0x1234,
                    amount=2,
                )
            ],
            bytes.fromhex("dc3ead883fc17ea3802cd0f8e362566b07b223f82e52f94c76cf420444b8ff81"),
            id="single-withdrawal",
        ),
        pytest.param(
            [
                Withdrawal(
                    index=0,
                    validator_index=1,
                    address=0x1234,
                    amount=2,
                ),
                Withdrawal(
                    index=1,
                    validator_index=2,
                    address=0xABCD,
                    amount=0,
                ),
            ],
            bytes.fromhex("069ab71e5d228db9b916880f02670c85682c46641bb9c95df84acc5075669e01"),
            id="multiple-withdrawals",
        ),
        pytest.param(
            [
                Withdrawal(
                    index=0,
                    validator_index=0,
                    address=0x100,
                    amount=0,
                ),
                Withdrawal(
                    index=0,
                    validator_index=0,
                    address=0x200,
                    amount=0,
                ),
            ],
            bytes.fromhex("daacd8fe889693f7d20436d9c0c044b5e92cc17b57e379997273fc67fd2eb7b8"),
            # Fix: this id duplicated "multiple-withdrawals" above, which made
            # pytest auto-suffix both node ids (…0 / …1) and obscured which of
            # the two cases actually failed. Give the duplicate-index case a
            # distinct, descriptive id.
            id="duplicate-index-withdrawals",
        ),
    ],
)
def test_withdrawals_root(withdrawals: List[Withdrawal], expected_root: bytes) -> None:
    """
    Test that withdrawals_root returns the expected hash.

    Covers the empty list, a single entry, two distinct entries, and two
    entries sharing the same withdrawal index.
    """
    assert Withdrawal.list_root(withdrawals) == expected_root
@pytest.mark.parametrize(
    "model",
    [
        Environment(),
    ],
    ids=lambda model: model.__class__.__name__,
)
def test_model_copy(model: CopyValidateModel) -> None:
    """A copied model must serialize identically and keep its set-fields."""
    duplicate = model.copy()
    # Same JSON representation as the source model...
    assert to_json(duplicate) == to_json(model)
    # ...and the same record of which fields were explicitly set.
    assert duplicate.model_fields_set == model.model_fields_set
@pytest.mark.parametrize(
    "value, expected",
    [
        pytest.param(
            Transaction().with_signature_and_sender(),
            Bytes(
                "0xf85f800a8252089400000000000000000000000000000000000000aa808026a0cc61d852649c34"
                "cc0b71803115f38036ace257d2914f087bf885e6806a664fbda02020cb35f5d7731ab540d6261450"
                "3a7f2344301a86342f67daf011c1341551ff"
            ),
            id="type-0-transaction",
        ),
        pytest.param(
            Transaction(
                access_list=[AccessList(address=0, storage_keys=[0, 1])],
            ).with_signature_and_sender(),
            Bytes(
                "0x01f8bd01800a8252089400000000000000000000000000000000000000aa8080f85bf859940000"
                "000000000000000000000000000000000000f842a000000000000000000000000000000000000000"
                "00000000000000000000000000a00000000000000000000000000000000000000000000000000000"
                "00000000000180a0d48930fdc0183ff3e5f5a6d87cbdb8a719bfcd0396d22ef360166fb4cc35e42e"
                "a063aba729e7a5f7b55c41b68dc6250769c98a25b5d21f5649576c5e79aa71a90e"
            ),
            id="type-1-transaction",
        ),
        pytest.param(
            Transaction(
                access_list=[AccessList(address=0, storage_keys=[0, 1])],
                max_fee_per_gas=10,
                max_priority_fee_per_gas=5,
            ).with_signature_and_sender(),
            Bytes(
                "0x02f8be0180050a8252089400000000000000000000000000000000000000aa8080f85bf8599400"
                "00000000000000000000000000000000000000f842a0000000000000000000000000000000000000"
                "0000000000000000000000000000a000000000000000000000000000000000000000000000000000"
                "0000000000000180a0759123c15b9b06a9a063c9e9568e52631e8161cf663a5035505896070f67c3"
                "21a0562291c94c89b5ab380c68fb8e254d34e373f4cd546a0ca3f40e455ce7072575"
            ),
            id="type-2-transaction",
        ),
        pytest.param(
            Transaction(
                access_list=[AccessList(address=1, storage_keys=[2, 3])],
                max_fee_per_gas=10,
                max_priority_fee_per_gas=5,
                max_fee_per_blob_gas=20,
                blob_versioned_hashes=[0, 1],
            ).with_signature_and_sender(),
            Bytes(
                "0x03f901030180050a8252089400000000000000000000000000000000000000aa8080f85bf85994"
                "0000000000000000000000000000000000000001f842a00000000000000000000000000000000000"
                "000000000000000000000000000002a0000000000000000000000000000000000000000000000000"
                "000000000000000314f842a000000000000000000000000000000000000000000000000000000000"
                "00000000a0000000000000000000000000000000000000000000000000000000000000000101a0cf"
                "df45e03bb79a725059abfdff26243794e4f2cedc31cb951bae0064cb0d18ffa07af8ae0e4eb39dad"
                "4f8210c49e3c81f4d2c50d0d94987122b788d17efa623de1"
            ),
            id="type-3-transaction",
        ),
        pytest.param(
            Transaction(
                access_list=[AccessList(address=0, storage_keys=[0, 1])],
                max_fee_per_gas=10,
                max_priority_fee_per_gas=5,
                authorization_list=[
                    AuthorizationTuple(
                        address=0,
                        signer=EOA(key=TestPrivateKey),
                    ),
                ],
            ).with_signature_and_sender(),
            Bytes(
                "0x04f9011c0180050a8252089400000000000000000000000000000000000000aa8080f85bf85994"
                "0000000000000000000000000000000000000000f842a00000000000000000000000000000000000"
                "000000000000000000000000000000a0000000000000000000000000000000000000000000000000"
                "0000000000000001f85cf85a809400000000000000000000000000000000000000008080a0def12a"
                "a13571bba668b619dc7523da4a44b4373f26ff19356a6b58a66217839fa0130454fb52ed23b604de"
                "189d89b7b119698408a1cd80995959c8e3560aabb8ca80a051b5d457dfc118d4b0793c83c728c1ee"
                "b9890ee98391493e8bb1c31855bcf3eca05d1d0c49babee471a39d63c9d5ca15f8e71051cc87335f"
                "16d9bc7e4d56de278e"
            ),
            id="type-4-transaction",
        ),
        pytest.param(
            Transaction(
                access_list=[AccessList(address=0, storage_keys=[0, 1])],
                max_fee_per_gas=10,
                max_priority_fee_per_gas=5,
                authorization_list=[
                    AuthorizationTuple(
                        address=0,
                        secret_key=TestPrivateKey,
                    ),
                ],
            ).with_signature_and_sender(),
            Bytes(
                "0x04f9011c0180050a8252089400000000000000000000000000000000000000aa8080f85bf85994"
                "0000000000000000000000000000000000000000f842a00000000000000000000000000000000000"
                "000000000000000000000000000000a0000000000000000000000000000000000000000000000000"
                "0000000000000001f85cf85a809400000000000000000000000000000000000000008080a0def12a"
                "a13571bba668b619dc7523da4a44b4373f26ff19356a6b58a66217839fa0130454fb52ed23b604de"
                "189d89b7b119698408a1cd80995959c8e3560aabb8ca80a051b5d457dfc118d4b0793c83c728c1ee"
                "b9890ee98391493e8bb1c31855bcf3eca05d1d0c49babee471a39d63c9d5ca15f8e71051cc87335f"
                "16d9bc7e4d56de278e"
            ),
            id="type-4-transaction-auth-secret-key",
        ),
    ],
)
def test_serialization(value: Any, expected: Bytes) -> None:
    """
    Test `to_serializable_element` function.

    Each signed transaction type (0 through 4) must RLP-encode to the exact
    expected byte string.
    """
    assert value.rlp().hex() == expected.hex()
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_phase_manager.py | src/ethereum_test_types/tests/test_phase_manager.py | """Test suite for TestPhaseManager functionality."""
import pytest
from ethereum_test_base_types import Address
from ethereum_test_tools import Transaction
from ..phase_manager import TestPhase, TestPhaseManager
# autouse: every test in this module starts with no active phase.
@pytest.fixture(autouse=True)
def reset_phase_manager() -> None:
    """Reset TestPhaseManager singleton state before each test."""
    TestPhaseManager.reset()
def test_test_phase_enum_values() -> None:
    """Each TestPhase member must map to its expected string value."""
    expected_values = {
        TestPhase.SETUP: "setup",
        TestPhase.EXECUTION: "execution",
    }
    for phase, text in expected_values.items():
        assert phase.value == text
def test_phase_manager_class_state() -> None:
    """Phase state lives on the TestPhaseManager class itself."""
    # No instance is ever created; everything goes through classmethods.
    assert TestPhaseManager.get_current_phase() is None
    with TestPhaseManager.setup():
        # Inside the context, the class-level phase is SETUP.
        assert TestPhaseManager.get_current_phase() == TestPhase.SETUP
    # Leaving the context clears the class-level phase again.
    assert TestPhaseManager.get_current_phase() is None
def test_default_phase_is_none() -> None:
    """Test that default phase is None (no context set)."""
    # Before any phase context manager is entered, no phase is active.
    assert TestPhaseManager.get_current_phase() is None
def test_transaction_auto_detects_default_phase() -> None:
    """A transaction built outside any phase context keeps phase None."""
    transaction = Transaction(to=Address(0x123), value=100, gas_limit=21000)
    assert transaction.test_phase is None
def test_transaction_auto_detects_setup_phase() -> None:
    """A transaction built inside the setup context is tagged SETUP."""
    with TestPhaseManager.setup():
        transaction = Transaction(to=Address(0x456), value=50, gas_limit=21000)
    # The phase is recorded at construction time.
    assert transaction.test_phase == TestPhase.SETUP
def test_phase_context_switching() -> None:
    """Transactions pick up whichever phase is active when they are built."""
    # No context yet: transactions default to phase None.
    before = Transaction(to=Address(0x100), value=100, gas_limit=21000)
    assert before.test_phase is None
    with TestPhaseManager.setup():
        # SETUP is now the active phase and is stamped on new transactions.
        assert TestPhaseManager.get_current_phase() == TestPhase.SETUP
        during = Transaction(to=Address(0x200), value=200, gas_limit=21000)
        assert during.test_phase == TestPhase.SETUP
    # Context exited: phase and newly built transactions revert to None.
    assert TestPhaseManager.get_current_phase() is None
    after = Transaction(to=Address(0x300), value=300, gas_limit=21000)
    assert after.test_phase is None
def test_nested_phase_contexts() -> None:
    """An inner phase context temporarily overrides the outer one."""
    with TestPhaseManager.setup():
        outer_tx = Transaction(to=Address(0x100), value=100, gas_limit=21000)
        assert outer_tx.test_phase == TestPhase.SETUP
        with TestPhaseManager.execution():
            # Inner context: EXECUTION wins while it is open.
            inner_tx = Transaction(to=Address(0x200), value=200, gas_limit=21000)
            assert inner_tx.test_phase == TestPhase.EXECUTION
        # Inner context closed: SETUP is active again.
        restored_tx = Transaction(to=Address(0x300), value=300, gas_limit=21000)
        assert restored_tx.test_phase == TestPhase.SETUP
@pytest.mark.parametrize(
    ["num_setup_txs", "num_exec_txs"],
    [
        pytest.param(0, 1, id="exec_only"),
        pytest.param(1, 0, id="setup_only"),
        pytest.param(3, 5, id="mixed"),
        pytest.param(10, 10, id="many"),
    ],
)
def test_multiple_transactions_phase_tagging(num_setup_txs: int, num_exec_txs: int) -> None:
    """Every transaction in a batch is stamped with the phase active at build time."""
    # Batch built inside the setup context.
    with TestPhaseManager.setup():
        setup_txs = [
            Transaction(to=Address(0x1000 + i), value=i * 10, gas_limit=21000)
            for i in range(num_setup_txs)
        ]
    # Batch built with no phase context active.
    exec_txs = [
        Transaction(to=Address(0x2000 + i), value=i * 20, gas_limit=21000)
        for i in range(num_exec_txs)
    ]
    # Setup batch: every member tagged SETUP.
    assert all(tx.test_phase == TestPhase.SETUP for tx in setup_txs)
    # Context-free batch: every member tagged None.
    assert all(tx.test_phase is None for tx in exec_txs)
def test_phase_reset() -> None:
    """TestPhaseManager.reset() must clear any lingering phase state."""
    # Enter and leave a context so the manager has been exercised once.
    with TestPhaseManager.setup():
        pass
    # Force a stale phase directly onto the class-level attribute.
    TestPhaseManager._current_phase = TestPhase.SETUP
    assert TestPhaseManager.get_current_phase() == TestPhase.SETUP
    # reset() must restore the no-phase default.
    TestPhaseManager.reset()
    assert TestPhaseManager.get_current_phase() is None
def test_class_state_shared() -> None:
    """Phase state is global: every reader of the class sees the same phase."""
    assert TestPhaseManager.get_current_phase() is None
    with TestPhaseManager.setup():
        # Any access to the class observes the shared SETUP phase...
        assert TestPhaseManager.get_current_phase() == TestPhase.SETUP
        # ...and transactions built now are tagged with it.
        tagged = Transaction(to=Address(0x789), value=75, gas_limit=21000)
        assert tagged.test_phase == TestPhase.SETUP
    # After the context, the shared phase is None again.
    assert TestPhaseManager.get_current_phase() is None
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_eof_v1.py | src/ethereum_test_types/tests/test_eof_v1.py | """Test suite for `code.eof.v1` module."""
from typing import List, Tuple
import pytest
from ethereum_test_base_types import to_json
from ethereum_test_base_types.pydantic import CopyValidateModel
from ethereum_test_vm import Opcodes as Op
from ..eof.v1 import AutoSection, Container, Section, SectionKind
test_cases: List[Tuple[str, Container, str]] = [
(
"No sections",
Container(
auto_data_section=False,
auto_type_section=AutoSection.NONE,
sections=[],
),
"ef0001 00",
),
(
"Single code section",
Container(
sections=[
Section.Code("0x00"),
],
),
"ef0001 01 0004 02 0001 0001 ff 0000 00 00800000 00",
),
(
"Single code section, single container section",
Container(
sections=[
Section.Code("0x0A"),
Section.Container("0x0B"),
],
),
"ef0001 01 0004 02 0001 0001 03 0001 00000001 ff 0000 00 00800000 0A 0B",
),
(
"Single code section, single container section, single data",
Container(
sections=[
Section.Code("0x0A"),
Section.Container("0x0B"),
Section.Data("0x0C"),
],
),
"ef0001 01 0004 02 0001 0001 03 0001 00000001 ff 0001 00 00800000 0A 0B 0C",
),
(
"Single code section, single container section, single data 2",
Container(
sections=[
Section.Code("0x0A"),
Section.Data("0x0C"),
Section.Container("0x0B"),
],
),
"ef0001 01 0004 02 0001 0001 03 0001 00000001 ff 0001 00 00800000 0A 0B 0C",
),
(
"Single code section, multiple container section, single data",
Container(
sections=[
Section.Code("0x0A"),
Section.Container("0x0B"),
Section.Data("0x0C"),
Section.Container("0x0D"),
],
),
"ef0001 01 0004 02 0001 0001 03 0002 00000001 00000001 ff 0001 00 00800000 0A 0B 0D 0C",
),
(
"Single code section, multiple container sections",
Container(
sections=[
Section.Code("0x00"),
Section.Container("0x0001"),
Section.Container("0x00"),
],
),
"ef0001 01 0004 02 0001 0001 03 0002 00000002 00000001 ff 0000 00 00800000 00 0001 00",
),
(
"No code section",
Container(
sections=[Section.Data("0x00")],
),
"ef0001 01 0000 ff 0001 00 00",
),
(
"Single data section",
Container(
auto_type_section=AutoSection.NONE,
sections=[
Section.Data("0x00"),
],
),
"ef0001 ff 0001 00 00",
),
(
"Custom invalid section",
Container(
auto_data_section=False,
auto_type_section=AutoSection.NONE,
sections=[
Section(
kind=0xFE,
data="0x00",
),
],
),
"ef0001 fe 0001 00 00",
),
(
"Multiple sections",
Container(
sections=[
Section.Code("0x0e"),
Section.Data("0x0f"),
],
),
"ef0001 01 0004 02 0001 0001 ff 0001 00 00800000 0e 0f",
),
(
"Multiple type sections",
Container(
sections=[
Section(
kind=SectionKind.TYPE,
data="0x00000000",
),
Section(
kind=SectionKind.TYPE,
data="0x00000000",
),
Section.Code("0x00"),
],
auto_type_section=AutoSection.NONE,
),
"ef0001 01 0004 01 0004 02 0001 0001 ff 0000 00 00000000 00000000 00",
),
(
"Invalid Magic",
Container(
magic=b"\xef\xfe",
sections=[
Section.Code("0x00"),
],
),
"effe01 01 0004 02 0001 0001 ff 0000 00 00800000 00",
),
(
"Invalid Version",
Container(
version=b"\x02",
sections=[
Section.Code("0x00"),
],
),
"ef0002 01 0004 02 0001 0001 ff 0000 00 00800000 00",
),
(
"Section Invalid size Version",
Container(
sections=[
Section.Code(
"0x00",
custom_size=0xFFFF,
),
],
),
"ef0001 01 0004 02 0001 ffff ff 0000 00 00800000 00",
),
(
"Nested EOF",
Container(
sections=[
Section.Code("0x00"),
Section(
kind=SectionKind.CONTAINER,
data=Container(
sections=[Section.Code("0x01")],
),
),
],
),
"ef0001 01 0004 02 0001 0001 03 0001 00000014 ff 0000 00 00800000 00"
"ef0001 01 0004 02 0001 0001 ff 0000 00 00800000 01",
),
(
"Nested EOF in Data",
Container(
sections=[
Section.Code("0x00"),
Section.Data(
data=Container(
sections=[Section.Code("0x01")],
),
),
],
),
"ef0001 01 0004 02 0001 0001 ff 0014 00 00800000 00"
"ef0001 01 0004 02 0001 0001 ff 0000 00 00800000 01",
),
(
"Incomplete code section",
Container(
sections=[
Section.Code(
code=b"",
custom_size=0x02,
),
],
),
"ef0001 01 0004 02 0001 0002 ff 0000 00 00800000",
),
(
"Trailing bytes after code section",
Container(
sections=[
Section.Code("0x600000"),
],
extra=bytes.fromhex("deadbeef"),
),
"ef0001 01 0004 02 0001 0003 ff 0000 00 00800000 600000 deadbeef",
),
(
"Multiple code sections",
Container(
sections=[
Section.Code("0x600000"),
Section.Code("0x600000"),
],
),
"""
ef0001 01 0008 02 0002 0003 0003 ff 0000 00
00800000 00800000
600000
600000
""",
),
(
"No section terminator",
Container(
sections=[
Section.Code("0x600000"),
],
header_terminator=bytes(),
),
"ef0001 01 0004 02 0001 0003 ff 0000 00800000 600000",
),
(
"No auto type section",
Container(
auto_type_section=AutoSection.NONE,
sections=[
Section.Code("0x00"),
],
),
"ef0001 02 0001 0001 ff 0000 00 00",
),
(
"Data section in types",
Container(
sections=[
Section.Code("0x00"),
Section.Data(
data="0x00",
force_type_listing=True,
),
],
),
"""
ef0001 01 0008 02 0001 0001 ff 0001 00
00800000 00800000
00 00
""",
),
(
"Code section inputs",
Container(
sections=[
Section.Code(
"0x00",
code_inputs=1,
max_stack_height=1,
),
],
),
"""
ef0001 01 0004 02 0001 0001 ff 0000 00
01800000
00
""",
),
(
"Code section inputs 2",
Container(
sections=[
Section.Code(
"0x00",
code_inputs=0xFF,
max_stack_height=0xFF,
),
],
),
"""
ef0001 01 0004 02 0001 0001 ff 0000 00
ff800000
00
""",
),
(
"Code section outputs",
Container(
sections=[
Section.Code(
"0x00",
code_outputs=1,
),
],
),
"""
ef0001 01 0004 02 0001 0001 ff 0000 00
00010000
00
""",
),
(
"Code section outputs 2",
Container(
sections=[
Section.Code(
"0x00",
code_outputs=0xFF,
),
],
),
"""
ef0001 01 0004 02 0001 0001 ff 0000 00
00ff0000
00
""",
),
(
"Code section max stack height",
Container(
sections=[
Section.Code(
"0x00",
max_stack_height=0x0201,
),
],
),
"""
ef0001 01 0004 02 0001 0001 ff 0000 00
00800201
00
""",
),
(
"Code section max stack height 2",
Container(
sections=[
Section.Code(
"0x00",
max_stack_height=0xFFFF,
),
],
),
"""
ef0001 01 0004 02 0001 0001 ff 0000 00
0080FFFF
00
""",
),
(
"Code section max stack height 3",
Container(
sections=[
Section.Code(
"0x00",
max_stack_height=0xFFFF,
),
Section.Code("0x00"),
],
),
"""
ef0001 01 0008 02 0002 0001 0001 ff 0000 00
0080FFFF 00800000
00
00
""",
),
(
"Custom type section",
Container(
sections=[
Section(
kind=SectionKind.TYPE,
data="0x00",
),
Section.Code("0x00"),
],
),
"ef0001 01 0001 02 0001 0001 ff 0000 00 00 00",
),
(
"EIP-4750 Single code section oversized type",
Container(
sections=[
Section(
kind=SectionKind.TYPE,
data="0x0000000000",
),
Section.Code("0x00"),
],
),
"ef0001 01 0005 02 0001 0001 ff 0000 00 0000000000 00",
),
(
"Empty type section",
Container(
sections=[
Section(kind=SectionKind.TYPE, data="0x"),
Section.Code("0x00"),
],
auto_type_section=AutoSection.NONE,
),
"ef0001 01 0000 02 0001 0001 ff 0000 00 00",
),
(
"Check that simple valid EOF1 deploys",
Container(
sections=[
Section.Code(
"0x305000",
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
),
Section.Data("0xef"),
],
auto_type_section=AutoSection.AUTO,
),
"""
# EOF deployed code
ef0001 # Magic followed by version
010004 # One code segment
020001 # One code segment
0003 # code seg 0: 3 bytes
ff0001 # One byte data segment
00 # End of header
# Code segment 0 header
00 # Zero inputs
80 # Non-Returning Function
0001 # Max stack height 1
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Data segment
ef
""",
),
(
"Data Section custom_size parameter overwrites bytes size",
Container(
sections=[
Section.Code(
"0x305000",
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
),
Section.Data("0x0bad", custom_size=4),
],
auto_type_section=AutoSection.AUTO,
),
"""
# EOF deployed code
ef0001 # Magic followed by version
010004 # One code segment
020001 # One code segment
0003 # code seg 0: 3 bytes
ff0004 # Four byte data segment
00 # End of header
# Code segment 0 header
00 # Zero inputs
80 # Non-Returning Function
0001 # Max stack height 1
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Data segment
0bad # 2 bytes instead of four
""",
),
(
"Multiple code segments",
Container(
sections=[
Section.Code(
"0x5f35e2030000000300060009e50001e50002e50003e3000400",
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
),
Section.Code(
"0x5f5ff3",
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=2,
),
Section.Code(
"0x5f5ffd",
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=2,
),
Section.Code(
"0xfe",
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=0,
),
Section.Code(
"0xe4",
code_inputs=0,
code_outputs=0,
max_stack_height=0,
),
Section.Data("0x0bad60a7", custom_size=4),
],
auto_type_section=AutoSection.AUTO,
),
"""
# EOF deployed code
EF0001 # Magic and Version ( 1 )
010014 # Types length ( 20 )
020005 # Total code sections ( 5 )
0019 # Code section 0 , 25 bytes
0003 # Code section 1 , 3 bytes
0003 # Code section 2 , 3 bytes
0001 # Code section 3 , 1 bytes
0001 # Code section 4 , 1 bytes
ff0004 # Data section length ( 4 )
00 # Terminator (end of header)
# Code 0 types
00 # 0 inputs
80 # 0 outputs (Non-returning function)
0001 # max stack: 1
# Code 1 types
00 # 0 inputs
80 # 0 outputs (Non-returning function)
0002 # max stack: 2
# Code 2 types
00 # 0 inputs
80 # 0 outputs (Non-returning function)
0002 # max stack: 2
# Code 3 types
00 # 0 inputs
80 # 0 outputs (Non-returning function)
0000 # max stack: 0
# Code 4 types
00 # 0 inputs
00 # 0 outputs
0000 # max stack: 0
# Code section 0
5f # [0] PUSH0
35 # [1] CALLDATALOAD
e2030000000300060009 # [2] RJUMPV(0,3,6,9)
e50001 # [12] JUMPF(1)
e50002 # [15] JUMPF(2)
e50003 # [18] JUMPF(3)
e30004 # [21] CALLF(4)
00 # [24] STOP
# Code section 1
5f # [0] PUSH0
5f # [1] PUSH0
f3 # [2] RETURN
# Code section 2
5f # [0] PUSH0
5f # [1] PUSH0
fd # [2] REVERT
# Code section 3
fe # [0] INVALID
# Code section 4
e4 # [0] RETF
# Data section
0bad60a7
""",
),
(
"Custom Types Section overrides code",
Container(
sections=[
Section(kind=SectionKind.TYPE, data="0x00700002", custom_size=8),
Section(
kind=SectionKind.CODE,
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
data="0x305000",
),
Section(kind=SectionKind.DATA, data="0x0bad60A7"),
],
),
"""
# EOF deployed code
ef0001 # Magic followed by version
010008 # Two code segments
020001 # One code segment
0003 # code seg 0: 3 bytes
ff0004 # Four byte data segment
00 # End of header
# Code segment 0 header
00 # Zero inputs
70 # Non-Returning Function
0002 # Max stack height 1
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Data segment
0bad60A7 # 4 bytes (valid)
""",
),
(
"Type section wrong order, but only in HEADER",
Container(
sections=[
Section(
kind=SectionKind.CODE,
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
data="0x305000",
),
Section(
kind=SectionKind.TYPE,
data="0x00800001",
),
Section(kind=SectionKind.DATA, data="0xef"),
],
auto_sort_sections=AutoSection.ONLY_BODY,
),
"""
# EOF deployed code
ef0001 # Magic followed by version
020001 # One code segment
0003 # code seg 0: 3 bytes
010004 # One code segment
ff0001 # One byte data segment
00 # End of header
# Code segment 0 header
00 # Zero inputs
80 # Non-Returning Function
0001 # Max stack height 1
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Data segment
ef
""",
),
(
"Type section wrong order, but only in BODY",
Container(
sections=[
Section(
kind=SectionKind.CODE,
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
data="0x305000",
),
Section(
kind=SectionKind.TYPE,
data="0x00800001",
),
Section(kind=SectionKind.DATA, data="0xef"),
],
auto_sort_sections=AutoSection.ONLY_HEADER,
),
"""
# EOF deployed code
ef0001 # Magic followed by version
010004 # One code segment
020001 # One code segment
0003 # code seg 0: 3 bytes
ff0001 # One byte data segment
00 # End of header
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Code segment 0 header
00 # Zero inputs
80 # Non-Returning Function
0001 # Max stack height 1
# Data segment
ef
""",
),
(
"Type section missing, but only in HEADER",
Container(
sections=[
Section(
kind=SectionKind.CODE,
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
data="0x305000",
),
Section(kind=SectionKind.DATA, data="0xef"),
],
auto_type_section=AutoSection.ONLY_BODY,
),
"""
# EOF deployed code
ef0001 # Magic followed by version
020001 # One code segment
0003 # code seg 0: 3 bytes
ff0001 # One byte data segment
00 # End of header
# Code segment 0 header
00 # Zero inputs
80 # Non-Returning Function
0001 # Max stack height 1
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Data segment
ef
""",
),
(
"Type section missing, but only in BODY",
Container(
sections=[
Section(
kind=SectionKind.CODE,
code_inputs=0,
code_outputs=128, # Non returning
max_stack_height=1,
data="0x305000",
),
Section(kind=SectionKind.DATA, data="0xef"),
],
auto_type_section=AutoSection.ONLY_HEADER,
),
"""
# EOF deployed code
ef0001 # Magic followed by version
010004 # Types section
020001 # One code segment
0003 # code seg 0: 3 bytes
ff0001 # One byte data segment
00 # End of header
# Code segment 0 header
# Code segment 0 code
30 # 1 ADDRESS
50 # 2 POP
00 # 3 STOP
# Data segment
ef
""",
),
(
"Container.Init simple test",
Container.Init(deploy_container=Container.Code(b"\0")),
"""
# EOF deployed code
ef0001 # Magic followed by version
010004 # Types section
020001 # One code segment
0006 # code seg 0: 6 bytes
030001 # One container segment
00000014 # container seg 0: 20 bytes
ff0000 # Zero byte data segment
00 # End of header
0080 0002 # Types section
# Code segment 0 code
6000 # 1 PUSH1 0
6000 # 2 PUSH1 0
ee00 # 3 RETURNCODE[0]
# Subcontainer 0
ef0001 # Magic followed by version
010004 # Types section
020001 # One code segment
0001 # code seg 0: 1 byte
ff0000 # Zero byte data segment
00 # End of header
0080 0000 # Types section
# Code segment 0 code
00 # 1 STOP
""",
),
(
"Container.Init initcode prefix",
Container.Init(deploy_container=Container.Code(b"\0"), initcode_prefix=Op.SSTORE(0, 0)),
"""
# EOF deployed code
ef0001 # Magic followed by version
010004 # Types section
020001 # One code segment
000b # code seg 0: 11 bytes
030001 # One container segment
00000014 # container seg 0: 20 bytes
ff0000 # Zero byte data segment
00 # End of header
0080 0002 # Types section
# Code segment 0 code
6000 # 1 PUSH1 0
6000 # 2 PUSH1 0
55 # 3 SSTORE
6000 # 4 PUSH1 0
6000 # 5 PUSH1 0
ee00 # 6 RETURNCODE[0]
# Subcontainer 0
ef0001 # Magic followed by version
010004 # Types section
020001 # One code segment
0001 # code seg 0: 1 byte
ff0000 # Zero byte data segment
00 # End of header
0080 0000 # Types section
# Code segment 0 code
00 # 1 STOP
""",
),
]
@pytest.mark.parametrize(
    ["container", "hex_value"],
    [(case[1], case[2]) for case in test_cases],
    ids=[case[0] for case in test_cases],
)
def test_eof_v1_assemble(container: Container, hex_value: str) -> None:
    """Check that each `Container` assembles to its annotated hex encoding."""
    # Strip '#' comments from the expected string, then drop all whitespace
    # so only raw hex digits remain before decoding.
    cleaned = remove_comments_from_string(hex_value)
    expected_bytes = bytes.fromhex(cleaned.replace(" ", "").replace("\n", ""))
    actual_bytes = bytes(container)
    assert actual_bytes == expected_bytes, f"""
    Container: {actual_bytes.hex()}
    Expected : {expected_bytes.hex()}
    """
def remove_comments_from_string(input_string: str) -> str:
    """Strip ``#`` comments and blank lines from a multi-line string.

    For each line, text from the first ``#`` onward is removed and the
    remainder right-stripped; lines without a ``#`` are kept verbatim.
    Lines that end up empty (or whitespace-only) are dropped entirely.
    The surviving lines are re-joined with newlines — remaining spaces
    are left for the caller to strip before hex-decoding.
    """
    kept_lines = []
    for raw_line in input_string.split("\n"):
        before_hash, hash_found, _ = raw_line.partition("#")
        # Only strip trailing whitespace when a comment was actually cut off,
        # mirroring the original find()-based behavior exactly.
        cleaned = before_hash.rstrip() if hash_found else raw_line
        if cleaned.strip():
            kept_lines.append(cleaned)
    return "\n".join(kept_lines)
@pytest.mark.parametrize(
    "model",
    [
        Container(),
    ],
    ids=lambda model: model.__class__.__name__,
)
def test_model_copy(model: CopyValidateModel) -> None:
    """Test that the copy method returns a correct copy of the model."""
    # The copy must serialize to identical JSON as the original...
    assert to_json(model.copy()) == to_json(model)
    # ...and preserve pydantic's set-fields bookkeeping, so defaults vs.
    # explicitly-set fields are distinguished identically on the copy.
    assert model.copy().model_fields_set == model.model_fields_set
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/__init__.py | src/ethereum_test_types/tests/__init__.py | """Tests for the ethereum_test_types package."""
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_block_access_lists.py | src/ethereum_test_types/tests/test_block_access_lists.py | """Unit tests for BlockAccessListExpectation validation."""
from typing import Any
import pytest
from ethereum_test_base_types import Address, StorageKey
from ethereum_test_types.block_access_list import (
BalAccountAbsentValues,
BalAccountChange,
BalAccountExpectation,
BalBalanceChange,
BalCodeChange,
BalNonceChange,
BalStorageChange,
BalStorageSlot,
BlockAccessList,
BlockAccessListExpectation,
BlockAccessListValidationError,
)
def test_address_exclusion_validation_passes() -> None:
    """Test that address exclusion works when address is not in BAL."""
    alice, bob = Address(0xA), Address(0xB)
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=alice,
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)],
            )
        ]
    )
    # Mapping bob to None asserts he must be absent — which he is here.
    expected = BlockAccessListExpectation(
        account_expectations={
            alice: BalAccountExpectation(nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)]),
            bob: None,
        }
    )
    expected.verify_against(bal)
def test_address_exclusion_validation_raises_when_address_is_present() -> None:
    """Test that validation fails when excluded address is in BAL."""
    alice, bob = Address(0xA), Address(0xB)
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=alice,
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)],
            ),
            BalAccountChange(
                address=bob,
                balance_changes=[BalBalanceChange(tx_index=1, post_balance=100)],
            ),
        ]
    )
    # Bob is (wrongly) declared absent, yet he appears in the actual BAL.
    expected = BlockAccessListExpectation(account_expectations={bob: None})
    with pytest.raises(BlockAccessListValidationError, match="should not be in BAL but was found"):
        expected.verify_against(bal)
@pytest.mark.parametrize(
    "empty_changes_definition,exception_message",
    [
        [BalAccountExpectation(), "ambiguous. Use BalAccountExpectation.empty()"],
        [BalAccountExpectation.empty(), "No account changes expected for "],
    ],
    ids=["BalAccountExpectation()", "BalAccountExpectation.empty()"],
)
def test_empty_account_changes_definitions(
    empty_changes_definition: Any,
    exception_message: str,
) -> None:
    """
    Test that validation fails when expected empty changes but actual
    has changes.
    """
    account = Address(0xA)
    # Actual BAL has a real nonce change, contradicting both "empty" forms.
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)],
            )
        ]
    )
    expected = BlockAccessListExpectation(
        account_expectations={account: empty_changes_definition}
    )
    with pytest.raises(BlockAccessListValidationError, match=exception_message):
        expected.verify_against(bal)
def test_empty_list_validation() -> None:
    """Test that empty list validates correctly."""
    account = Address(0xA)
    # Every change category is explicitly empty on both sides; verification
    # must accept explicit-empty vs explicit-empty.
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                nonce_changes=[],
                balance_changes=[],
                code_changes=[],
                storage_changes=[],
                storage_reads=[],
            )
        ]
    )
    expected = BlockAccessListExpectation(
        account_expectations={
            account: BalAccountExpectation(
                nonce_changes=[],
                balance_changes=[],
                code_changes=[],
                storage_changes=[],
                storage_reads=[],
            )
        }
    )
    expected.verify_against(bal)
@pytest.mark.parametrize(
    "field,value",
    [
        ["nonce_changes", BalNonceChange(tx_index=1, post_nonce=1)],
        ["balance_changes", BalBalanceChange(tx_index=1, post_balance=100)],
        ["code_changes", BalCodeChange(tx_index=1, new_code=b"code")],
        [
            "storage_changes",
            BalStorageSlot(
                slot=0x01,
                slot_changes=[BalStorageChange(tx_index=1, post_value=0x42)],
            ),
        ],
        ["storage_reads", 0x01],
    ],
)
def test_empty_list_validation_fails(field: str, value: Any) -> None:
    """Test that validation fails when expecting empty but field has values."""
    alice = Address(0xA)
    # Start from an account change with one storage read; the parametrized
    # field is then mutated in post-construction to carry the test value.
    alice_acct_change = BalAccountChange(
        address=alice,
        storage_reads=[0x02],
    )
    if field == "storage_reads":
        # storage_reads is the baseline field itself, so overwrite it...
        alice_acct_change.storage_reads = [value]
        # set another field to non-empty to avoid all-empty account change
        alice_acct_change.nonce_changes = [BalNonceChange(tx_index=1, post_nonce=1)]
    else:
        setattr(alice_acct_change, field, [value])
    actual_bal = BlockAccessList([alice_acct_change])
    # Build the mirrored expectation, but with the parametrized field EMPTY,
    # so verification must fail with "Expected <field> to be empty".
    alice_acct_expectation = BalAccountExpectation(
        storage_reads=[0x02],
    )
    if field == "storage_reads":
        alice_acct_expectation.storage_reads = []
        # match the filled field in actual to avoid all-empty
        # account expectation
        alice_acct_expectation.nonce_changes = [BalNonceChange(tx_index=1, post_nonce=1)]
    else:
        setattr(alice_acct_expectation, field, [])
    expectation = BlockAccessListExpectation(account_expectations={alice: alice_acct_expectation})
    with pytest.raises(
        BlockAccessListValidationError,
        match=f"Expected {field} to be empty",
    ):
        expectation.verify_against(actual_bal)
def test_partial_validation() -> None:
    """Test that unset fields are not validated."""
    account = Address(0xA)
    # Actual BAL carries nonce, balance, and storage-read entries.
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)],
                balance_changes=[BalBalanceChange(tx_index=1, post_balance=100)],
                storage_reads=[0x01, 0x02],
            )
        ]
    )
    # Only nonce changes are pinned; fields left unset on the expectation
    # (balance_changes, storage_reads) must be skipped by the verifier.
    expected = BlockAccessListExpectation(
        account_expectations={
            account: BalAccountExpectation(
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)]
            )
        }
    )
    expected.verify_against(bal)
def test_storage_changes_validation() -> None:
    """Test storage changes validation."""
    contract = Address(0xC)
    # One write of 0x42 to slot 0x01 at tx 1.
    slot_write = BalStorageSlot(
        slot=0x01,
        slot_changes=[BalStorageChange(tx_index=1, post_value=0x42)],
    )
    bal = BlockAccessList(
        [BalAccountChange(address=contract, storage_changes=[slot_write])]
    )
    # The expectation mirrors the actual write exactly, so it must verify.
    expected = BlockAccessListExpectation(
        account_expectations={
            contract: BalAccountExpectation(
                storage_changes=[
                    BalStorageSlot(
                        slot=0x01,
                        slot_changes=[BalStorageChange(tx_index=1, post_value=0x42)],
                    )
                ]
            )
        }
    )
    expected.verify_against(bal)
def test_missing_expected_address() -> None:
    """Test that validation fails when expected address is missing."""
    alice, bob = Address(0xA), Address(0xB)
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=alice,
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)],
            )
        ]
    )
    # Bob never appears in the actual BAL, so expecting him must fail.
    expected = BlockAccessListExpectation(
        account_expectations={
            bob: BalAccountExpectation(
                nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)]
            )
        }
    )
    with pytest.raises(
        BlockAccessListValidationError, match="Expected address .* not found in actual BAL"
    ):
        expected.verify_against(bal)
@pytest.mark.parametrize(
    "addresses,error_message",
    [
        (
            # 0xB listed before 0xA.
            [Address(0xB), Address(0xA)],
            "BAL addresses are not in lexicographic order",
        ),
        (
            # 0x3 listed before 0x2.
            [Address(0x1), Address(0x3), Address(0x2)],
            "BAL addresses are not in lexicographic order",
        ),
    ],
)
def test_actual_bal_address_ordering_validation(addresses: Any, error_message: str) -> None:
    """Test that actual BAL must have addresses in lexicographic order."""
    account_changes = [
        BalAccountChange(address=address, nonce_changes=[]) for address in addresses
    ]
    bal = BlockAccessList(account_changes)
    with pytest.raises(BlockAccessListValidationError, match=error_message):
        BlockAccessListExpectation(account_expectations={}).verify_against(bal)
@pytest.mark.parametrize(
    "storage_slots,error_message",
    [
        (
            # 0x02 listed before 0x01.
            [StorageKey(0x02), StorageKey(0x01)],
            "Storage slots not in ascending order",
        ),
        (
            # 0x03 listed before 0x02.
            [StorageKey(0x01), StorageKey(0x03), StorageKey(0x02)],
            "Storage slots not in ascending order",
        ),
    ],
)
def test_actual_bal_storage_slot_ordering(storage_slots: Any, error_message: str) -> None:
    """Test that actual BAL must have storage slots in lexicographic order."""
    slot_entries = [BalStorageSlot(slot=slot, slot_changes=[]) for slot in storage_slots]
    bal = BlockAccessList(
        [BalAccountChange(address=Address(0xA), storage_changes=slot_entries)]
    )
    with pytest.raises(BlockAccessListValidationError, match=error_message):
        BlockAccessListExpectation(account_expectations={}).verify_against(bal)
@pytest.mark.parametrize(
    "storage_reads,error_message",
    [
        # 0x02 listed before 0x01.
        ([StorageKey(0x02), StorageKey(0x01)], "Storage reads not in ascending order"),
        # 0x03 listed before 0x02.
        (
            [StorageKey(0x01), StorageKey(0x03), StorageKey(0x02)],
            "Storage reads not in ascending order",
        ),
    ],
)
def test_actual_bal_storage_reads_ordering(storage_reads: Any, error_message: str) -> None:
    """Test that actual BAL must have storage reads in lexicographic order."""
    bal = BlockAccessList(
        [BalAccountChange(address=Address(0xA), storage_reads=storage_reads)]
    )
    with pytest.raises(BlockAccessListValidationError, match=error_message):
        BlockAccessListExpectation(account_expectations={}).verify_against(bal)
@pytest.mark.parametrize(
    "field_name",
    ["nonce_changes", "balance_changes", "code_changes"],
)
def test_actual_bal_tx_indices_ordering(field_name: str) -> None:
    """Test that actual BAL must have tx indices in ascending order."""
    # Factory per change type; each builds one entry for a given tx index.
    factories: Any = {
        "nonce_changes": lambda idx: BalNonceChange(tx_index=idx, post_nonce=1),
        "balance_changes": lambda idx: BalBalanceChange(tx_index=idx, post_balance=100),
        "code_changes": lambda idx: BalCodeChange(tx_index=idx, new_code=b"code"),
    }
    make_change = factories[field_name]
    # Deliberately out of order: 2, 3, 1.
    out_of_order = [make_change(idx) for idx in (2, 3, 1)]
    bal = BlockAccessList(
        [BalAccountChange(address=Address(0xA), **{field_name: out_of_order})]
    )
    with pytest.raises(
        BlockAccessListValidationError,
        match="Transaction indices not in ascending order",
    ):
        BlockAccessListExpectation(account_expectations={}).verify_against(bal)
@pytest.mark.parametrize(
    "field_name",
    ["nonce_changes", "balance_changes", "code_changes"],
)
def test_actual_bal_duplicate_tx_indices(field_name: str) -> None:
    """
    Test that actual BAL must not have duplicate tx indices in change lists.
    """
    # Each candidate list repeats tx_index=1 before moving on to tx_index=2.
    duplicated_lists: Any = {
        "nonce_changes": [
            BalNonceChange(tx_index=1, post_nonce=1),
            BalNonceChange(tx_index=1, post_nonce=2),
            BalNonceChange(tx_index=2, post_nonce=3),
        ],
        "balance_changes": [
            BalBalanceChange(tx_index=1, post_balance=100),
            BalBalanceChange(tx_index=1, post_balance=200),
            BalBalanceChange(tx_index=2, post_balance=300),
        ],
        "code_changes": [
            BalCodeChange(tx_index=1, new_code=b"code1"),
            BalCodeChange(tx_index=1, new_code=b""),
            BalCodeChange(tx_index=2, new_code=b"code2"),
        ],
    }
    bal = BlockAccessList(
        [BalAccountChange(address=Address(0xA), **{field_name: duplicated_lists[field_name]})]
    )
    with pytest.raises(
        BlockAccessListValidationError,
        match=f"Duplicate transaction indices in {field_name}.*Duplicates: \\[1\\]",
    ):
        BlockAccessListExpectation(account_expectations={}).verify_against(bal)
def test_actual_bal_storage_duplicate_tx_indices() -> None:
    """
    Test that storage changes must not have duplicate tx indices within same
    slot.
    """
    # Slot 0x01 carries two writes at tx_index=1, which must be rejected.
    offending_slot = BalStorageSlot(
        slot=0x01,
        slot_changes=[
            BalStorageChange(tx_index=1, post_value=0x100),
            BalStorageChange(tx_index=1, post_value=0x200),
            BalStorageChange(tx_index=2, post_value=0x300),
        ],
    )
    bal = BlockAccessList(
        [BalAccountChange(address=Address(0xA), storage_changes=[offending_slot])]
    )
    with pytest.raises(
        BlockAccessListValidationError,
        match="Duplicate transaction indices in storage slot.*Duplicates: \\[1\\]",
    ):
        BlockAccessListExpectation(account_expectations={}).verify_against(bal)
def test_expected_addresses_auto_sorted() -> None:
    """
    Test that expected addresses are automatically sorted before comparison.

    The BAL *Expectation address order should not matter for the dict. We DO,
    however, validate that the actual BAL (from t8n) is sorted correctly.
    """
    addresses = [Address(0xA), Address(0xB), Address(0xC)]
    # Actual BAL in the required lexicographic order.
    bal = BlockAccessList(
        [BalAccountChange(address=address, nonce_changes=[]) for address in addresses]
    )
    # Expectation dict deliberately built in reverse order; order of the
    # expectation keys must not matter.
    expected = BlockAccessListExpectation(
        account_expectations={
            address: BalAccountExpectation(nonce_changes=[])
            for address in reversed(addresses)
        }
    )
    expected.verify_against(bal)
@pytest.mark.parametrize(
    "expected_slots,should_pass",
    [
        # Correct order - should pass
        ([StorageKey(0x01), StorageKey(0x02), StorageKey(0x03)], True),
        # Partial subset in correct order - should pass
        ([StorageKey(0x01), StorageKey(0x03)], True),
        # Out of order - should fail
        ([StorageKey(0x01), StorageKey(0x03), StorageKey(0x02)], False),
        # Wrong order from start - should fail
        ([StorageKey(0x02), StorageKey(0x01)], False),
    ],
)
def test_expected_storage_slots_ordering(expected_slots: Any, should_pass: bool) -> None:
    """Test that expected storage slots must be defined in correct order."""
    account = Address(0xA)
    # Actual BAL always carries slots 0x01..0x03 in ascending order.
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                storage_changes=[
                    BalStorageSlot(slot=StorageKey(key), slot_changes=[])
                    for key in (0x01, 0x02, 0x03)
                ],
            )
        ]
    )
    expected = BlockAccessListExpectation(
        account_expectations={
            account: BalAccountExpectation(
                storage_changes=[
                    BalStorageSlot(slot=slot, slot_changes=[]) for slot in expected_slots
                ]
            )
        }
    )
    if not should_pass:
        with pytest.raises(
            BlockAccessListValidationError,
            match="not found or not in correct order",
        ):
            expected.verify_against(bal)
    else:
        expected.verify_against(bal)
@pytest.mark.parametrize(
    "expected_reads,should_pass",
    [
        # Correct order - should pass
        ([StorageKey(0x01), StorageKey(0x02), StorageKey(0x03)], True),
        # Partial subset in correct order - should pass
        ([StorageKey(0x02), StorageKey(0x03)], True),
        # Out of order - should fail
        ([StorageKey(0x03), StorageKey(0x02)], False),
        # Wrong order with all elements - should fail
        ([StorageKey(0x01), StorageKey(0x03), StorageKey(0x02)], False),
    ],
)
def test_expected_storage_reads_ordering(expected_reads: Any, should_pass: bool) -> None:
    """Test that expected storage reads must be defined in correct order."""
    account = Address(0xA)
    # Actual BAL always carries reads 0x01..0x03 in ascending order.
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                storage_reads=[StorageKey(0x01), StorageKey(0x02), StorageKey(0x03)],
            )
        ]
    )
    expected = BlockAccessListExpectation(
        account_expectations={account: BalAccountExpectation(storage_reads=expected_reads)}
    )
    if not should_pass:
        with pytest.raises(
            BlockAccessListValidationError,
            match="not found or not in correct order",
        ):
            expected.verify_against(bal)
    else:
        expected.verify_against(bal)
@pytest.mark.parametrize(
    "expected_tx_indices,should_pass",
    [
        # Correct order - should pass
        ([1, 2, 3], True),
        # Partial subset in correct order - should pass
        ([1, 3], True),
        # Single element - should pass
        ([2], True),
        # Out of order - should fail
        ([2, 1], False),
        # Wrong order with all elements - should fail
        ([1, 3, 2], False),
    ],
)
def test_expected_tx_indices_ordering(expected_tx_indices: Any, should_pass: bool) -> None:
    """Test that expected tx indices must be defined in correct order."""
    account = Address(0xA)
    # Actual BAL carries nonce changes for tx 1..3 in ascending order.
    bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                nonce_changes=[
                    BalNonceChange(tx_index=idx, post_nonce=idx) for idx in (1, 2, 3)
                ],
            )
        ]
    )
    expected = BlockAccessListExpectation(
        account_expectations={
            account: BalAccountExpectation(
                nonce_changes=[
                    BalNonceChange(tx_index=idx, post_nonce=idx)
                    for idx in expected_tx_indices
                ]
            )
        }
    )
    if not should_pass:
        with pytest.raises(
            BlockAccessListValidationError,
            match="not found or not in correct order",
        ):
            expected.verify_against(bal)
    else:
        expected.verify_against(bal)
@pytest.mark.parametrize("has_change_should_raise", [True, False])
def test_absent_values_nonce_changes(has_change_should_raise: bool) -> None:
    """Test nonce_changes_at_tx validator with present/absent changes."""
    account = Address(0xA)
    changes = [BalNonceChange(tx_index=1, post_nonce=1)]
    if has_change_should_raise:
        # A nonce change at tx 2 violates the absence expectation below.
        changes.append(BalNonceChange(tx_index=2, post_nonce=2))
    bal = BlockAccessList([BalAccountChange(address=account, nonce_changes=changes)])
    expected = BlockAccessListExpectation(
        account_expectations={
            account: BalAccountExpectation(
                absent_values=BalAccountAbsentValues(
                    nonce_changes=[BalNonceChange(tx_index=2, post_nonce=2)]
                )
            )
        }
    )
    if not has_change_should_raise:
        expected.verify_against(bal)
    else:
        with pytest.raises(Exception, match="Unexpected nonce change found at tx 0x2"):
            expected.verify_against(bal)
@pytest.mark.parametrize("has_change_should_raise", [True, False])
def test_absent_values_balance_changes(has_change_should_raise: bool) -> None:
    """Test balance_changes_at_tx validator with present/absent changes."""
    account = Address(0xA)
    changes = [BalBalanceChange(tx_index=1, post_balance=100)]
    if has_change_should_raise:
        # A balance change at tx 2 violates the absence expectation below.
        changes.append(BalBalanceChange(tx_index=2, post_balance=200))
    bal = BlockAccessList([BalAccountChange(address=account, balance_changes=changes)])
    expected = BlockAccessListExpectation(
        account_expectations={
            account: BalAccountExpectation(
                absent_values=BalAccountAbsentValues(
                    balance_changes=[BalBalanceChange(tx_index=2, post_balance=200)]
                )
            )
        }
    )
    if not has_change_should_raise:
        expected.verify_against(bal)
    else:
        with pytest.raises(
            Exception,
            match="Unexpected balance change found at tx 0x2",
        ):
            expected.verify_against(bal)
@pytest.mark.parametrize("has_change_should_raise", [True, False])
def test_absent_values_storage_changes(has_change_should_raise: bool) -> None:
    """Test storage_changes_at_slots validator with present/absent changes."""
    contract = Address(0xC)
    writes = [
        BalStorageSlot(
            slot=0x01,
            slot_changes=[BalStorageChange(tx_index=1, post_value=0x99)],
        )
    ]
    if has_change_should_raise:
        # A write to slot 0x42 violates the absence expectation below.
        writes.append(
            BalStorageSlot(
                slot=0x42,
                slot_changes=[BalStorageChange(tx_index=1, post_value=0xBEEF)],
            )
        )
    bal = BlockAccessList([BalAccountChange(address=contract, storage_changes=writes)])
    expected = BlockAccessListExpectation(
        account_expectations={
            contract: BalAccountExpectation(
                absent_values=BalAccountAbsentValues(
                    storage_changes=[
                        BalStorageSlot(
                            slot=0x42,
                            slot_changes=[BalStorageChange(tx_index=1, post_value=0xBEEF)],
                        )
                    ]
                )
            )
        }
    )
    if not has_change_should_raise:
        expected.verify_against(bal)
    else:
        with pytest.raises(Exception, match="Unexpected storage change found at slot"):
            expected.verify_against(bal)
@pytest.mark.parametrize("has_read_should_raise", [True, False])
def test_absent_values_storage_reads(has_read_should_raise: bool) -> None:
    """Test storage_reads_at_slots validator with present/absent reads."""
    contract = Address(0xC)
    # Build the observed reads; optionally include the forbidden slot 0x42.
    observed_reads = [StorageKey(0x01)]
    if has_read_should_raise:
        observed_reads.append(StorageKey(0x42))
    actual_bal = BlockAccessList(
        [
            BalAccountChange(
                address=contract,
                storage_reads=observed_reads,
            ),
        ]
    )
    expectation = BlockAccessListExpectation(
        account_expectations={
            # no storage reads at slot 0x42
            contract: BalAccountExpectation(
                absent_values=BalAccountAbsentValues(storage_reads=[StorageKey(0x42)])
            ),
        }
    )
    if not has_read_should_raise:
        expectation.verify_against(actual_bal)
    else:
        with pytest.raises(Exception, match="Unexpected storage read found at slot"):
            expectation.verify_against(actual_bal)
@pytest.mark.parametrize("has_change_should_raise", [True, False])
def test_absent_values_code_changes(has_change_should_raise: bool) -> None:
    """Test code_changes_at_tx validator with present/absent changes."""
    account = Address(0xA)
    # The code change that the expectation will declare as forbidden.
    forbidden = BalCodeChange(tx_index=2, new_code=b"\x60\x00")
    observed = [BalCodeChange(tx_index=1, new_code=b"\x00")]
    if has_change_should_raise:
        # add code change at tx 2 which should trigger failure
        observed.append(BalCodeChange(tx_index=2, new_code=b"\x60\x00"))
    actual_bal = BlockAccessList(
        [
            BalAccountChange(
                address=account,
                code_changes=observed,
            ),
        ]
    )
    expectation = BlockAccessListExpectation(
        account_expectations={
            # no code changes at tx 2
            account: BalAccountExpectation(
                absent_values=BalAccountAbsentValues(code_changes=[forbidden])
            ),
        }
    )
    if not has_change_should_raise:
        expectation.verify_against(actual_bal)
    else:
        with pytest.raises(Exception, match="Unexpected code change found at tx 0x2"):
            expectation.verify_against(actual_bal)
def test_multiple_absent_valuess() -> None:
    """Test multiple absence validators working together."""
    contract = Address(0xC)
    # The account only touches slot 0x01 (one write and one read); every
    # other change list is explicitly empty.
    actual_bal = BlockAccessList(
        [
            BalAccountChange(
                address=contract,
                nonce_changes=[],
                balance_changes=[],
                storage_changes=[
                    BalStorageSlot(
                        slot=0x01,
                        slot_changes=[BalStorageChange(tx_index=1, post_value=0x99)],
                    )
                ],
                storage_reads=[StorageKey(0x01)],
                code_changes=[],
            ),
        ]
    )
    # Test that multiple validators all pass
    expectation = BlockAccessListExpectation(
        account_expectations={
            contract: BalAccountExpectation(
                storage_changes=[
                    BalStorageSlot(
                        slot=0x01,
                        slot_changes=[BalStorageChange(tx_index=1, post_value=0x99)],
                    )
                ],
                # None of the forbidden entries below are present in
                # `actual_bal`, so verification is expected to succeed.
                absent_values=BalAccountAbsentValues(
                    nonce_changes=[
                        BalNonceChange(tx_index=1, post_nonce=0),
                        BalNonceChange(tx_index=2, post_nonce=0),
                    ],
                    balance_changes=[
                        BalBalanceChange(tx_index=1, post_balance=0),
                        BalBalanceChange(tx_index=2, post_balance=0),
                    ],
                    storage_changes=[
                        BalStorageSlot(
                            slot=0x42, slot_changes=[BalStorageChange(tx_index=1, post_value=0)]
                        ),
                        BalStorageSlot(
                            slot=0x43, slot_changes=[BalStorageChange(tx_index=1, post_value=0)]
                        ),
                    ],
                    storage_reads=[StorageKey(0x42), StorageKey(0x43)],
                    code_changes=[
                        BalCodeChange(tx_index=1, new_code=b""),
                        BalCodeChange(tx_index=2, new_code=b""),
                    ],
                ),
            ),
        }
    )
    expectation.verify_against(actual_bal)
def test_absent_values_with_multiple_tx_indices() -> None:
    """Test absence validators with multiple transaction indices."""
    alice = Address(0xA)
    actual_bal = BlockAccessList(
        [
            BalAccountChange(
                address=alice,
                nonce_changes=[
                    # nonce changes at tx 1 and 3
                    BalNonceChange(tx_index=1, post_nonce=1),
                    BalNonceChange(tx_index=3, post_nonce=2),
                ],
            ),
        ]
    )
    # Forbidding changes at txs 2 and 4 (where no change occurred) must pass.
    expectation = BlockAccessListExpectation(
        account_expectations={
            alice: BalAccountExpectation(
                nonce_changes=[
                    BalNonceChange(tx_index=1, post_nonce=1),
                    BalNonceChange(tx_index=3, post_nonce=2),
                ],
                absent_values=BalAccountAbsentValues(
                    nonce_changes=[
                        BalNonceChange(tx_index=2, post_nonce=0),
                        BalNonceChange(tx_index=4, post_nonce=0),
                    ]
                ),
            ),
        }
    )
    expectation.verify_against(actual_bal)
    expectation_fail = BlockAccessListExpectation(
        account_expectations={
            alice: BalAccountExpectation(
                absent_values=BalAccountAbsentValues(
                    nonce_changes=[
                        # wrongly forbid change at txs 1 and 2
                        # (1 exists, so should fail)
                        BalNonceChange(tx_index=1, post_nonce=1),
                        BalNonceChange(tx_index=2, post_nonce=0),
                    ]
                ),
            ),
        }
    )
    with pytest.raises(Exception, match="Unexpected nonce change found at tx 0x1"):
        expectation_fail.verify_against(actual_bal)
def test_bal_account_absent_values_comprehensive() -> None:
"""Test comprehensive BalAccountAbsentValues usage."""
addr = Address(0xA)
# Test forbidding nonce changes
actual_bal = BlockAccessList(
[BalAccountChange(address=addr, nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)])]
)
expectation = BlockAccessListExpectation(
account_expectations={
addr: BalAccountExpectation(
absent_values=BalAccountAbsentValues(
nonce_changes=[BalNonceChange(tx_index=1, post_nonce=1)]
)
),
}
)
with pytest.raises(
BlockAccessListValidationError, match="Unexpected nonce change found at tx"
):
expectation.verify_against(actual_bal)
# Test forbidding balance changes
actual_bal = BlockAccessList(
[
BalAccountChange(
address=addr, balance_changes=[BalBalanceChange(tx_index=2, post_balance=100)]
)
]
)
expectation = BlockAccessListExpectation(
account_expectations={
addr: BalAccountExpectation(
absent_values=BalAccountAbsentValues(
balance_changes=[BalBalanceChange(tx_index=2, post_balance=100)]
)
),
}
)
with pytest.raises(
BlockAccessListValidationError, match="Unexpected balance change found at tx"
):
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | true |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/tests/test_helpers.py | src/ethereum_test_types/tests/test_helpers.py | """Test suite for `ethereum_test.helpers` module."""
import pytest
from ethereum_test_base_types import Address
from ..helpers import compute_create2_address, compute_create_address
def test_address() -> None:
    """Test `ethereum_test.base_types.Address`."""
    zero_address = "0x0000000000000000000000000000000000000000"
    # Construction from hex strings and from integers compares equal to the
    # canonical lowercase hex-string form.
    assert Address(zero_address) == zero_address
    assert Address(0) == zero_address
    assert Address(1) == "0x0000000000000000000000000000000000000001"
    assert Address(10) == "0x000000000000000000000000000000000000000a"
    assert (
        Address("0x0000000000000000000000000000000000000010")
        == "0x0000000000000000000000000000000000000010"
    )
    # Largest value representable in 20 bytes.
    assert Address(2 ** (20 * 8) - 1) == "0xffffffffffffffffffffffffffffffffffffffff"
    # Equality and inequality between Address instances.
    assert Address(0) == Address(0)
    assert Address(0) != Address(1)
@pytest.mark.parametrize(
    "address,nonce,expected_contract_address",
    [
        pytest.param(
            "0x00caa64684700d2825da7cac6ba0c6ed9fd2a1bb",
            0,
            "0x863df6bfa4469f3ead0be8f9f2aae51c91a907b4",
            id="zero-nonce-0x-str-address",
        ),
        pytest.param(
            "00caa64684700d2825da7cac6ba0c6ed9fd2a1bb",
            0,
            "0x863df6bfa4469f3ead0be8f9f2aae51c91a907b4",
            id="zero-nonce-str-address",
        ),
        pytest.param(
            int("0x00caa64684700d2825da7cac6ba0c6ed9fd2a1bb", 16),
            0,
            "0x863df6bfa4469f3ead0be8f9f2aae51c91a907b4",
            id="zero-nonce-int-address",
        ),
        pytest.param(
            "0x9c33eacc2f50e39940d3afaf2c7b8246b681a374",
            3,
            "0x7a250d5630b4cf539739df2c5dacb4c659f2488d",
            id="non-zero-nonce-0x-str-address",
        ),
        # Known limitation: nonces > 255 are expected to fail, see xfail.
        pytest.param(
            "0xba52c75764d6f594735dc735be7f1830cdf58ddf",
            3515,
            "0x06012c8cf97bead5deae237070f9587f8e7a266d",
            id="large-nonce-0x-str-address",
            marks=pytest.mark.xfail(
                reason="Nonce too large to convert with hard-coded to_bytes length of 1"
            ),
        ),
    ],
)
def test_compute_create_address(
    address: str | int, nonce: int, expected_contract_address: str
) -> None:
    """
    Test `ethereum_test.helpers.compute_create_address` with some famous
    contracts:
    - https://etherscan.io/address/0x863df6bfa4469f3ead0be8f9f2aae51c91a907b4
    - https://etherscan.io/address/0x7a250d5630b4cf539739df2c5dacb4c659f2488d
    - https://etherscan.io/address/0x06012c8cf97bead5deae237070f9587f8e7a266d.
    """
    assert compute_create_address(address=address, nonce=nonce) == expected_contract_address
@pytest.mark.parametrize(
    "address,salt,initcode,expected_contract_address",
    [
        pytest.param(
            "0x0000000000000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000",
            "0x00",
            "0x4d1a2e2bb4f88f0250f26ffff098b0b30b26bf38",
        ),
        pytest.param(
            "0xdeadbeef00000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000",
            "0x00",
            "0xB928f69Bb1D91Cd65274e3c79d8986362984fDA3",
        ),
        pytest.param(
            "0xdeadbeef00000000000000000000000000000000",
            "0xfeed000000000000000000000000000000000000",
            "0x00",
            "0xD04116cDd17beBE565EB2422F2497E06cC1C9833",
        ),
        pytest.param(
            "0x0000000000000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000",
            "0xdeadbeef",
            "0x70f2b2914A2a4b783FaEFb75f459A580616Fcb5e",
        ),
        pytest.param(
            "0x00000000000000000000000000000000deadbeef",
            "0xcafebabe",
            "0xdeadbeef",
            "0x60f3f640a8508fC6a86d45DF051962668E1e8AC7",
        ),
        pytest.param(
            "0x00000000000000000000000000000000deadbeef",
            "0xcafebabe",
            (
                "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"
                "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"
            ),
            "0x1d8bfDC5D46DC4f61D6b6115972536eBE6A8854C",
        ),
        # Empty initcode case.
        pytest.param(
            "0x0000000000000000000000000000000000000000",
            "0x0000000000000000000000000000000000000000",
            "0x",
            "0xE33C0C7F7df4809055C3ebA6c09CFe4BaF1BD9e0",
        ),
    ],
)
def test_compute_create2_address(
    address: str | int,
    salt: str,
    initcode: str,
    expected_contract_address: str,
) -> None:
    """
    Test `ethereum_test.helpers.compute_create2_address` using the CREATE2 geth
    test cases from:
    https://github.com/ethereum/go-ethereum/blob/2189773093b2fe6d161b6477589f964470ff5bce/core/vm/instructions_test.go.
    Note: `compute_create2_address` does not generate checksum addresses.
    """
    # Salt and initcode arrive as hex strings; convert to the types the
    # helper expects before comparing (case-insensitively) to the fixture.
    salt_as_int = int(salt, 16)
    initcode_as_bytes = bytes.fromhex(initcode[2:])
    assert (
        compute_create2_address(address, salt_as_int, initcode_as_bytes)
        == expected_contract_address.lower()
    )
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/eof/constants.py | src/ethereum_test_types/eof/constants.py | """EVM Object Format generic constants. Applicable to all EOF versions."""
# Version-independent EOF constants; v1-specific limits live in
# `eof/v1/constants.py`.
EOF_MAGIC = b"\xef\x00"
"""
The second byte found on every EOF formatted contract, which was chosen to
avoid clashes with three contracts which were deployed on Mainnet.
"""
EOF_HEADER_TERMINATOR = b"\x00"
"""Byte that terminates the header of the EOF format."""
LATEST_EOF_VERSION = 1
"""Latest existing EOF version."""
VERSION_BYTE_LENGTH = 1
"""Length of the version byte."""
MAX_RUNTIME_STACK_HEIGHT = 1024
"""
Maximum height of the EVM runtime operand stack. Exceeding this value during
execution will result in the stack overflow exception. This value applies to
both legacy EVM and EOF.
"""
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/eof/__init__.py | src/ethereum_test_types/eof/__init__.py | """EVM Object Format Library to generate bytecode for testing purposes."""
from .constants import LATEST_EOF_VERSION
# Public API of this package: re-export the latest EOF version constant.
__all__ = ("LATEST_EOF_VERSION",)
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/eof/v1/constants.py | src/ethereum_test_types/eof/v1/constants.py | """
EVM Object Format generic constants.
Applicable to EOF version 1.
"""
from ..constants import VERSION_BYTE_LENGTH
VERSION_NUMBER = 0x01
# Big-endian byte encoding of the version number, as emitted in the header.
VERSION_NUMBER_BYTES = VERSION_NUMBER.to_bytes(VERSION_BYTE_LENGTH, "big")
MAX_CODE_SECTIONS = 1024
MAX_RETURN_STACK_HEIGHT = 1024
MAX_STACK_INCREASE_LIMIT = 0x3FF
"""
Maximum value for the max stack increase accepted by the EOF format.
"""
MAX_CODE_INPUTS = 127
MAX_CODE_OUTPUTS = 127
# Sentinel value used in the `outputs` type-section field for non-returning
# code sections.
NON_RETURNING_SECTION = 0x80
MAX_RJUMPV_COUNT = 0xFF
MAX_BYTECODE_SIZE = 24576
# Initcode containers are allowed up to twice the deployed bytecode limit.
MAX_INITCODE_SIZE = MAX_BYTECODE_SIZE * 2
# Byte widths of the individual EOF header fields.
HEADER_SECTION_KIND_BYTE_LENGTH = 1
HEADER_SECTION_COUNT_BYTE_LENGTH = 2
HEADER_SECTION_SIZE_BYTE_LENGTH = 2
HEADER_SECTION_CONTAINER_SIZE_BYTE_LENGTH = 4
# Byte widths of the per-section entries in the types section
# (inputs, outputs, max stack height).
TYPES_INPUTS_BYTE_LENGTH = 1
TYPES_OUTPUTS_BYTE_LENGTH = 1
TYPES_STACK_BYTE_LENGTH = 2
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_types/eof/v1/__init__.py | src/ethereum_test_types/eof/v1/__init__.py | """
EVM Object Format Version 1 Library to generate bytecode for testing purposes.
"""
from dataclasses import dataclass
from enum import Enum, IntEnum, auto
from functools import cached_property
from typing import Any, Dict, List, Optional, Tuple
from pydantic import Field, GetCoreSchemaHandler
from pydantic_core.core_schema import (
PlainValidatorFunctionSchema,
no_info_plain_validator_function,
to_string_ser_schema,
)
from ethereum_test_base_types import Bytes, Hash
from ethereum_test_base_types.conversions import BytesConvertible
from ethereum_test_base_types.pydantic import CopyValidateModel
from ethereum_test_exceptions.exceptions import EOFExceptionInstanceOrList
from ethereum_test_vm import Bytecode
from ethereum_test_vm import Opcodes as Op
from ..constants import EOF_HEADER_TERMINATOR, EOF_MAGIC
from .constants import (
HEADER_SECTION_CONTAINER_SIZE_BYTE_LENGTH,
HEADER_SECTION_COUNT_BYTE_LENGTH,
HEADER_SECTION_KIND_BYTE_LENGTH,
HEADER_SECTION_SIZE_BYTE_LENGTH,
NON_RETURNING_SECTION,
TYPES_INPUTS_BYTE_LENGTH,
TYPES_OUTPUTS_BYTE_LENGTH,
TYPES_STACK_BYTE_LENGTH,
VERSION_NUMBER_BYTES,
)
class SectionKind(IntEnum):
    """Valid section-kind byte values for EOF V1 containers."""

    TYPE = 0x01
    CODE = 0x02
    CONTAINER = 0x03
    DATA = 0xFF

    def __str__(self) -> str:
        """Render the section kind as its symbolic name."""
        return self.name
class ContainerKind(Enum):
    """Valid container-kind values for EOF V1 containers."""

    RUNTIME = auto()
    INITCODE = auto()

    @staticmethod
    def __get_pydantic_core_schema__(
        source_type: Any, handler: GetCoreSchemaHandler
    ) -> PlainValidatorFunctionSchema:
        """
        Validate via `from_str` (no validation info needed) and serialize via
        the string representation.
        """
        return no_info_plain_validator_function(
            source_type.from_str,
            serialization=to_string_ser_schema(),
        )

    @staticmethod
    def from_str(value: "str | ContainerKind | None") -> "ContainerKind | None":
        """Return ContainerKind enum value from a string."""
        if value is None or isinstance(value, ContainerKind):
            # Pass through `None` and already-constructed enum members.
            return value
        return ContainerKind[value.upper()]

    def __str__(self) -> str:
        """Render the container kind as its symbolic name."""
        return self.name
class AutoSection(Enum):
    """Enum class for auto section generation approach."""

    AUTO = 1
    ONLY_HEADER = 2
    ONLY_BODY = 3
    NONE = 4

    def any(self) -> bool:
        """Return True unless auto-generation is fully disabled (NONE)."""
        return self is not AutoSection.NONE

    def header(self) -> bool:
        """Return True when the header part should be auto-generated."""
        return self not in (AutoSection.ONLY_BODY, AutoSection.NONE)

    def body(self) -> bool:
        """Return True when the body part should be auto-generated."""
        return self not in (AutoSection.ONLY_HEADER, AutoSection.NONE)
# Section kinds whose header entries may be merged into a single
# "kind, count, size..." header entry (see `Section.list_header`).
SUPPORT_MULTI_SECTION_HEADER = [SectionKind.CODE, SectionKind.CONTAINER]
class Section(CopyValidateModel):
    """Class that represents a section in an EOF V1 container."""

    data: Bytes = Bytes(b"")
    """
    Data to be contained by this section. Can be SupportsBytes, another EOF
    container or any other abstract data.
    """
    custom_size: int = 0
    """
    Custom size value to be used in the header. If unset, the header is built
    with length of the data.
    """
    kind: SectionKind | int
    """
    Kind of section that is represented by this object. Can be any `int`
    outside of the values defined by `SectionKind` for testing purposes.
    """
    force_type_listing: bool = False
    """
    Forces this section to appear in the TYPE section at the beginning of the
    container.
    """
    code_inputs: int = 0
    """Data stack items consumed by this code section (function)"""
    code_outputs: int = NON_RETURNING_SECTION
    """
    Data stack items produced by or expected at the end of this code section
    (function)
    """
    max_stack_increase: int | None = None
    """Maximum operand stack height increase above the code section inputs."""
    max_stack_height: int | None = None
    """Maximum height data stack reaches during execution of code section."""
    auto_max_stack_height: bool = False
    """
    Whether to automatically compute the best suggestion for the
    max_stack_height value for this code section.
    """
    auto_code_inputs_outputs: bool = False
    """
    Whether to automatically compute the best suggestion for the code_inputs,
    code_outputs values for this code section.
    """
    skip_header_listing: bool = False
    """Skip section from listing in the header"""
    skip_body_listing: bool = False
    """Skip section from listing in the body"""
    skip_types_body_listing: bool = False
    """
    Skip section from listing in the types body (input, output, stack) bytes
    """
    skip_types_header_listing: bool = False
    """
    Skip section from listing in the types header (not calculating input,
    output, stack size)
    """

    @cached_property
    def header(self) -> bytes:
        """Get formatted header for this section according to its contents."""
        # `custom_size` only overrides the data length when it was explicitly
        # set by the caller (tracked via pydantic's `model_fields_set`).
        size = self.custom_size if "custom_size" in self.model_fields_set else len(self.data)
        if self.kind == SectionKind.CODE:
            # Code headers carry a section count and one size per section, so
            # they must be produced by `list_header` over all code sections.
            raise Exception("Need container-wide view of code sections to generate header")
        return self.kind.to_bytes(
            HEADER_SECTION_KIND_BYTE_LENGTH, byteorder="big"
        ) + size.to_bytes(HEADER_SECTION_SIZE_BYTE_LENGTH, byteorder="big")

    @cached_property
    def type_definition(self) -> bytes:
        """Returns a serialized type section entry for this section."""
        # Only code sections (or sections explicitly forced) get an entry.
        if self.kind != SectionKind.CODE and not self.force_type_listing:
            return bytes()
        code_inputs, code_outputs, max_stack_increase, max_stack_height = (
            self.code_inputs,
            self.code_outputs,
            self.max_stack_increase,
            self.max_stack_height,
        )
        if self.auto_max_stack_height or self.auto_code_inputs_outputs:
            (
                auto_code_inputs,
                auto_code_outputs,
                auto_max_height,
            ) = compute_code_stack_values(self.data)
            if self.auto_code_inputs_outputs:
                code_inputs, code_outputs = (
                    auto_code_inputs,
                    auto_code_outputs,
                )
            if self.auto_max_stack_height:
                max_stack_increase = auto_max_height - code_inputs
        # `max_stack_increase` and `max_stack_height` are mutually exclusive:
        # the header field stores the increase above `code_inputs`.
        if max_stack_increase is not None:
            assert max_stack_height is None
        elif max_stack_height is not None:
            max_stack_increase = max_stack_height - code_inputs
        else:
            max_stack_increase = 0
        assert max_stack_increase >= 0, "incorrect max stack height value"
        return (
            code_inputs.to_bytes(length=TYPES_INPUTS_BYTE_LENGTH, byteorder="big")
            + code_outputs.to_bytes(length=TYPES_OUTPUTS_BYTE_LENGTH, byteorder="big")
            + max_stack_increase.to_bytes(length=TYPES_STACK_BYTE_LENGTH, byteorder="big")
        )

    def with_max_stack_height(self, max_stack_height: int) -> "Section":
        """
        Create copy of the section with `max_stack_height` set to the specified
        value.
        """
        return self.copy(max_stack_height=max_stack_height)

    def with_auto_max_stack_height(self) -> "Section":
        """
        Create copy of the section with `auto_max_stack_height` set to True.
        """
        return self.copy(auto_max_stack_height=True)

    def with_auto_code_inputs_outputs(self) -> "Section":
        """
        Create copy of the section with `auto_code_inputs_outputs` set to True.
        """
        return self.copy(auto_code_inputs_outputs=True)

    @staticmethod
    def list_header(sections: List["Section"]) -> bytes:
        """
        Create single code header for all code sections contained in the list.
        """
        # Allow 'types section' to use skip_header_listing flag
        if sections[0].skip_header_listing:
            return b""
        # Kinds outside SUPPORT_MULTI_SECTION_HEADER each emit their own
        # standalone header.
        if sections[0].kind not in SUPPORT_MULTI_SECTION_HEADER:
            return b"".join(s.header for s in sections)
        h = sections[0].kind.to_bytes(HEADER_SECTION_KIND_BYTE_LENGTH, "big")
        # Count only those sections that are not marked to be skipped for
        # header calculation
        header_registered_sections = 0
        for cs in sections:
            if not cs.skip_header_listing:
                header_registered_sections += 1
        h += header_registered_sections.to_bytes(HEADER_SECTION_COUNT_BYTE_LENGTH, "big")
        for cs in sections:
            # If section is marked to skip the header calculation, don't make
            # header for it
            if cs.skip_header_listing:
                continue
            size = cs.custom_size if "custom_size" in cs.model_fields_set else len(cs.data)
            # Container sizes use a wider (4-byte) field than other sections.
            body_size_length = (
                HEADER_SECTION_SIZE_BYTE_LENGTH
                if cs.kind != SectionKind.CONTAINER
                else HEADER_SECTION_CONTAINER_SIZE_BYTE_LENGTH
            )
            h += size.to_bytes(body_size_length, "big")
        return h

    @classmethod
    def Code(  # noqa: N802
        cls, code: Optional[BytesConvertible | Bytecode] = None, **kwargs: Any
    ) -> "Section":
        """Create new code section with the specified code."""
        if code is None:
            code = Bytecode()
        kwargs.pop("kind", None)
        if (
            "max_stack_height" not in kwargs
            and "max_stack_increase" not in kwargs
            and isinstance(code, Bytecode)
        ):
            # If not specified, take the max_stack_increase from the Bytecode.
            kwargs["max_stack_increase"] = code.max_stack_height - kwargs.get("code_inputs", 0)
        return cls(kind=SectionKind.CODE, data=code, **kwargs)

    @classmethod
    def Container(  # noqa: N802
        cls, container: "Container" | BytesConvertible, **kwargs: Any
    ) -> "Section":
        """Create new container section with the specified container."""
        kwargs.pop("kind", None)
        return cls(kind=SectionKind.CONTAINER, data=container, **kwargs)

    @classmethod
    def Data(cls, data: BytesConvertible = b"", **kwargs: Any) -> "Section":  # noqa: N802
        """Create new data section with the specified data."""
        kwargs.pop("kind", None)
        return cls(kind=SectionKind.DATA, data=data, **kwargs)
class Container(CopyValidateModel):
    """Class that represents an EOF V1 container."""

    name: Optional[str] = None
    """Name of the container"""
    sections: List[Section] = Field(default_factory=list)
    """List of sections in the container"""
    magic: Bytes = Bytes(EOF_MAGIC)
    """
    Custom magic value used to override the mandatory EOF value for testing
    purposes.
    """
    version: Bytes = Bytes(VERSION_NUMBER_BYTES)
    """
    Custom version value used to override the mandatory EOF V1 value for
    testing purposes.
    """
    header_terminator: Bytes = Bytes(EOF_HEADER_TERMINATOR)
    """Bytes used to terminate the header."""
    extra: Bytes = Bytes(b"")
    """
    Extra data to be appended at the end of the container, which will not be
    considered part of any of the sections, for testing purposes.
    """
    auto_type_section: AutoSection = AutoSection.AUTO
    """
    Automatically generate a `TYPE` section based on the included `CODE` kind
    sections.
    """
    auto_data_section: bool = True
    """Automatically generate a `DATA` section."""
    auto_sort_sections: AutoSection = AutoSection.AUTO
    """
    Automatically sort sections for the header and body: Headers: type section
    first, all code sections, container sections, last data section(s) Body:
    type section first, all code sections, data section(s), last container
    sections
    """
    skip_join_concurrent_sections_in_header: bool = False
    """Skip joining concurrent sections in the header (code and container)"""
    validity_error: EOFExceptionInstanceOrList | str | None = None
    """Optional error expected for the container. TODO: Remove str"""
    kind: ContainerKind = ContainerKind.RUNTIME
    """Kind type of the container."""
    raw_bytes: Optional[Bytes] = None
    """
    Optional raw bytes that represent the container. Used to have a cohesive
    type among all test cases, even those that do not resemble a valid EOF V1
    container.
    """
    expected_bytecode: Optional[Bytes] = None
    """
    Optional raw bytes of the expected constructed bytecode. This allows
    confirming that raw EOF and Container() representations are identical.
    """

    @cached_property
    def bytecode(self) -> bytes:
        """Converts the EOF V1 Container into bytecode."""
        # Raw-bytes containers bypass all section handling entirely.
        if self.raw_bytes is not None:
            assert len(self.sections) == 0
            return self.raw_bytes

        c = self.magic + self.version

        # Prepare auto-generated sections
        sections = self.sections

        # Add type section if needed
        if self.auto_type_section.any() and count_sections(sections, SectionKind.TYPE) == 0:
            # Calculate skipping flags
            types_header_size = 0
            type_section_data = b""
            for s in sections:
                types_header_size += (
                    len(s.type_definition) if not s.skip_types_header_listing else 0
                )
                type_section_data += s.type_definition if not s.skip_types_body_listing else b""
            sections = [
                Section(
                    kind=SectionKind.TYPE, data=type_section_data, custom_size=types_header_size
                )
            ] + sections

        # Add data section if needed
        if self.auto_data_section and count_sections(sections, SectionKind.DATA) == 0:
            sections = sections + [Section(kind=SectionKind.DATA, data="0x")]

        # Type section may be listed in the body only (ONLY_BODY), in which
        # case it is excluded from the header.
        header_sections = [
            s
            for s in sections
            if s.kind != SectionKind.TYPE or self.auto_type_section != AutoSection.ONLY_BODY
        ]
        if self.auto_sort_sections.header():
            header_sections.sort(key=lambda x: x.kind)

        # Add headers
        if header_sections:
            # Join headers of the same kind in a list of lists, only if they
            # are next to each other
            concurrent_sections: List[List[Section]] = [[header_sections[0]]]
            for s in header_sections[1:]:
                if (
                    s.kind == concurrent_sections[-1][-1].kind
                    and not self.skip_join_concurrent_sections_in_header
                ):
                    concurrent_sections[-1].append(s)
                else:
                    concurrent_sections.append([s])
            c += b"".join(Section.list_header(cs) for cs in concurrent_sections)

        # Add header terminator
        c += self.header_terminator

        body_sections = sections[:]
        if self.auto_sort_sections.body():
            # Sort sections for the body
            body_sections.sort(key=lambda x: x.kind)

        # Add section bodies
        for s in body_sections:
            if s.kind == SectionKind.TYPE and self.auto_type_section == AutoSection.ONLY_HEADER:
                continue
            if s.data and not s.skip_body_listing:
                c += s.data

        # Add extra (garbage)
        c += self.extra

        # Check if the constructed bytecode matches the expected one
        if self.expected_bytecode is not None:
            assert c == self.expected_bytecode

        return c

    @classmethod
    def Code(cls, code: Optional[BytesConvertible] = None, **kwargs: Any) -> "Container":  # noqa: N802
        """Create simple container with a single code section."""
        if code is None:
            code = Bytecode()
        kwargs.pop("kind", None)
        return cls(sections=[Section.Code(code=code, **kwargs)])

    @classmethod
    def Init(  # noqa: N802
        cls,
        deploy_container: "Container",
        initcode_prefix: Optional[Bytecode] = None,
    ) -> "Container":
        """
        Create simple init container that deploys the specified container.
        """
        if initcode_prefix is None:
            initcode_prefix = Bytecode()
        return cls(
            sections=[
                Section.Code(
                    code=initcode_prefix + Op.RETURNCODE[0](0, 0),
                ),
                Section.Container(
                    container=deploy_container,
                ),
            ],
        )

    @cached_property
    def hash(self) -> Hash:
        """Returns hash of the container bytecode."""
        return Bytes(self.bytecode).keccak256()

    def __bytes__(self) -> bytes:
        """Return bytecode of the container."""
        return self.bytecode

    def __len__(self) -> int:
        """Return length of the container bytecode."""
        return len(self.bytecode)

    def __str__(self) -> str:
        """
        Return name of the container if available, otherwise the bytecode of
        the container as a string.
        """
        if self.name:
            return self.name
        return str(self.bytecode)
@dataclass(kw_only=True)
class Initcode(Bytecode):
    """
    Helper class used to generate initcode for the specified deployment code,
    using EOF V1 container as init code.
    """

    name: str = "EOF V1 Initcode"
    """Name used to identify the initcode."""
    deploy_container: Container
    """Container to be deployed."""

    @cached_property
    def init_container(self) -> Container:
        """Generate a container that will be used as the initcode."""
        return Container(
            sections=[
                Section.Code(
                    code=Op.RETURNCODE[0](0, 0),
                    max_stack_height=2,
                ),
                Section.Container(
                    container=self.deploy_container,
                ),
            ],
        )

    @cached_property
    def bytecode(self) -> bytes:
        """
        Generate an EOF container that performs `EOFCREATE` with the specified
        code.
        """
        initcode = Container(
            sections=[
                Section.Code(
                    # TODO: Pass calldata
                    code=Op.EOFCREATE[0](0, 0, 0, 0) + Op.STOP(),
                    max_stack_height=4,
                ),
                Section.Container(
                    container=self.init_container,
                ),
            ]
        )
        return bytes(initcode)
def count_sections(sections: List[Section], kind: SectionKind | int) -> int:
    """Count sections from a list that match a specific kind."""
    return sum(1 for section in sections if section.kind == kind)
# Lookup table from opcode byte value to its Opcode object, used by
# `compute_code_stack_values` below.
OPCODE_MAP: Dict[int, Op] = {x.int(): x for x in Op}
def compute_code_stack_values(code: bytes) -> Tuple[int, int, int]:
    """
    Compute stack values for the given bytecode.

    Returns a `(inputs, outputs, max_stack_height)` tuple where `inputs` is
    the deepest deficit observed below the starting stack height (as a
    positive number), `outputs` is the net stack height at the end of the
    scan, and `max_stack_height` is the highest height reached. Unknown
    opcodes abort the scan and yield `(0, 0, 0)`.

    TODO: THIS DOES NOT WORK WHEN THE RJUMP* JUMPS BACKWARDS (and many other
    things).
    """
    i = 0
    stack_height = 0
    min_stack_height = 0
    max_stack_height = 0

    # compute type annotation
    while i < len(code):
        op = OPCODE_MAP.get(code[i])
        if op is None:
            # Unknown byte: give up and return neutral values.
            return (0, 0, 0)
        elif op == Op.RJUMPV:
            i += 1
            if i < len(code):
                # EIP-4200: the RJUMPV immediate byte holds `max_index`, and
                # the jump table that follows contains (max_index + 1)
                # two-byte relative offsets. Skip the immediate byte itself
                # plus the whole table. (The previous code advanced only
                # `count * 2` bytes, leaving the scan pointing at the
                # immediate byte and misreading the jump table as opcodes.)
                count = code[i]
                i += 1 + (count + 1) * 2
        else:
            i += 1 + op.data_portion_length

        stack_height -= op.popped_stack_items
        min_stack_height = min(stack_height, min_stack_height)
        stack_height += op.pushed_stack_items
        max_stack_height = max(stack_height, max_stack_height)
        # Clamp at zero: a deficit is accounted in `min_stack_height` and
        # reported as required inputs instead.
        if stack_height < 0:
            stack_height = 0
    return (abs(min_stack_height), stack_height, max_stack_height)
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_rpc/rpc.py | src/ethereum_test_rpc/rpc.py | """
JSON-RPC methods and helper functions for EEST consume based hive simulators.
"""
import logging
import os
import time
from itertools import count
from pprint import pprint
from typing import Any, ClassVar, Dict, List, Literal
import requests
from jwt import encode
from pydantic import ValidationError
from tenacity import (
before_sleep_log,
retry,
retry_if_exception_type,
stop_after_attempt,
wait_exponential,
)
from ethereum_test_base_types import Address, Bytes, Hash, to_json
from ethereum_test_types import Transaction
from pytest_plugins.custom_logging import get_logger
from .rpc_types import (
EthConfigResponse,
ForkchoiceState,
ForkchoiceUpdateResponse,
GetBlobsResponse,
GetPayloadResponse,
JSONRPCError,
PayloadAttributes,
PayloadStatus,
TransactionByHashResponse,
)
logger = get_logger(__name__)
# Block identifier accepted by block-parameterized `eth_*` calls: either an
# explicit block number or one of the named tags.
BlockNumberType = int | Literal["latest", "earliest", "pending"]
class SendTransactionExceptionError(Exception):
    """
    Represent an exception that is raised when a transaction fails to be sent.
    """

    # Optional context attached to the failure: the transaction object
    # and/or its RLP encoding.
    tx: Transaction | None = None
    tx_rlp: Bytes | None = None

    def __init__(
        self, *args: Any, tx: Transaction | None = None, tx_rlp: Bytes | None = None
    ) -> None:
        """
        Initialize SendTransactionExceptionError class with the given
        transaction.
        """
        super().__init__(*args)
        self.tx = tx
        self.tx_rlp = tx_rlp

    def __str__(self) -> str:
        """Return string representation of the exception."""
        message = super().__str__()
        if self.tx is not None:
            return f"{message} Transaction={self.tx.model_dump_json()}"
        if self.tx_rlp is not None:
            return f"{message} Transaction RLP={self.tx_rlp.hex()}"
        return message
class BaseRPC:
    """
    Represents a base RPC class for every RPC call used within EEST based hive
    simulators.
    """

    # JSON-RPC method prefix (e.g. "eth"), derived per-subclass in
    # `__init_subclass__`.
    namespace: ClassVar[str]
    response_validation_context: Any | None

    def __init__(
        self,
        url: str,
        *,
        response_validation_context: Any | None = None,
    ):
        """Initialize BaseRPC class with the given url."""
        self.url = url
        # Monotonically increasing JSON-RPC request id, starting at 1.
        self.request_id_counter = count(1)
        self.response_validation_context = response_validation_context

    def __init_subclass__(cls, namespace: str | None = None) -> None:
        """
        Set namespace of the RPC class to the lowercase of the class name.
        """
        if namespace is None:
            # Derive from the class name: strip a trailing "RPC" and
            # lowercase (e.g. EthRPC -> "eth").
            namespace = cls.__name__
            if namespace.endswith("RPC"):
                namespace = namespace.removesuffix("RPC")
            namespace = namespace.lower()
        cls.namespace = namespace

    @retry(
        retry=retry_if_exception_type((requests.ConnectionError, ConnectionRefusedError)),
        stop=stop_after_attempt(5),
        wait=wait_exponential(multiplier=0.5, min=0.5, max=4.0),
        before_sleep=before_sleep_log(logger, logging.WARNING),
        reraise=True,
    )
    def _make_request(
        self,
        url: str,
        json_payload: dict[str, Any],
        headers: dict[str, str],
        timeout: int | None,
    ) -> requests.Response:
        """
        Make HTTP POST request with retry logic for connection errors only.
        This method only retries network-level connection failures
        (ConnectionError, ConnectionRefusedError). HTTP status errors (4xx/5xx)
        are handled by the caller using response.raise_for_status() WITHOUT
        retries because:
        - 4xx errors are client errors (permanent failures, no point retrying)
        - 5xx errors are server errors that typically indicate
          application-level issues rather than transient network problems
        """
        logger.debug(f"Making HTTP request to {url}, timeout={timeout}")
        return requests.post(url, json=json_payload, headers=headers, timeout=timeout)

    def post_request(
        self,
        *,
        method: str,
        params: List[Any] | None = None,
        extra_headers: Dict[str, str] | None = None,
        request_id: int | str | None = None,
        timeout: int | None = None,
    ) -> Any:
        """
        Send JSON-RPC POST request to the client RPC server at port defined in
        the url.
        """
        if extra_headers is None:
            extra_headers = {}
        if params is None:
            params = []
        assert self.namespace, "RPC namespace not set"
        # The counter is always advanced, even when an explicit request id is
        # supplied by the caller.
        next_request_id_counter = next(self.request_id_counter)
        if request_id is None:
            request_id = next_request_id_counter
        payload = {
            "jsonrpc": "2.0",
            "method": f"{self.namespace}_{method}",
            "params": params,
            "id": request_id,
        }
        base_header = {
            "Content-Type": "application/json",
        }
        # Caller-provided headers override the defaults on key collision.
        headers = base_header | extra_headers
        logger.debug(
            f"Sending RPC request to {self.url}, method={self.namespace}_{method}, "
            f"timeout={timeout}..."
        )
        response = self._make_request(self.url, payload, headers, timeout)
        response.raise_for_status()
        response_json = response.json()
        # JSON-RPC level errors arrive with HTTP 200; surface them explicitly.
        if "error" in response_json:
            raise JSONRPCError(**response_json["error"])
        assert "result" in response_json, "RPC response didn't contain a result field"
        result = response_json["result"]
        return result
class EthRPC(BaseRPC):
    """
    Represents an `eth_X` RPC class for every default ethereum RPC method used
    within EEST based hive simulators.
    """

    # Maximum seconds to wait for a transaction to be included in a block.
    transaction_wait_timeout: int = 60
    poll_interval: float = 1.0  # how often to poll for tx inclusion

    # Mirrors the module-level `BlockNumberType` alias for convenience.
    BlockNumberType = int | Literal["latest", "earliest", "pending"]

    def __init__(
        self,
        *args: Any,
        transaction_wait_timeout: int = 60,
        poll_interval: float | None = None,
        **kwargs: Any,
    ) -> None:
        """
        Initialize EthRPC class with the given url and transaction wait
        timeout.
        """
        super().__init__(*args, **kwargs)
        self.transaction_wait_timeout = transaction_wait_timeout
        # Allow overriding via env "flag" EEST_POLL_INTERVAL or ctor arg
        # Priority: ctor arg > env var > default (1.0)
        env_val = os.getenv("EEST_POLL_INTERVAL")
        if poll_interval is not None:
            self.poll_interval = float(poll_interval)
        elif env_val:
            try:
                self.poll_interval = float(env_val)
            except ValueError:
                logger.warning("Invalid EEST_POLL_INTERVAL=%r; falling back to 1.0s", env_val)
                self.poll_interval = 1.0
        else:
            self.poll_interval = 1.0

    def config(self, timeout: int | None = None) -> EthConfigResponse | None:
        """
        `eth_config`: Returns information about a fork configuration of the
        client.

        Returns `None` if the client responds with a null result; re-raises
        validation and transport errors.
        """
        try:
            response = self.post_request(method="config", timeout=timeout)
            if response is None:
                logger.warning("eth_config request: failed to get response")
                return None
            return EthConfigResponse.model_validate(
                response, context=self.response_validation_context
            )
        except ValidationError as e:
            # Dump the full pydantic error list before re-raising to aid
            # debugging of schema mismatches.
            pprint(e.errors())
            raise e
        except Exception as e:
            logger.debug(f"exception occurred when sending JSON-RPC request: {e}")
            raise e

    def chain_id(self) -> int:
        """`eth_chainId`: Returns the current chain id."""
        response = self.post_request(method="chainId", timeout=10)
        return int(response, 16)

    def get_block_by_number(
        self, block_number: BlockNumberType = "latest", full_txs: bool = True
    ) -> Any | None:
        """
        `eth_getBlockByNumber`: Returns information about a block by block
        number.
        """
        # Integer block numbers must be hex-encoded for the RPC.
        block = hex(block_number) if isinstance(block_number, int) else block_number
        params = [block, full_txs]
        response = self.post_request(method="getBlockByNumber", params=params)
        return response

    def get_block_by_hash(self, block_hash: Hash, full_txs: bool = True) -> Any | None:
        """`eth_getBlockByHash`: Returns information about a block by hash."""
        params = [f"{block_hash}", full_txs]
        response = self.post_request(method="getBlockByHash", params=params)
        return response

    def get_balance(self, address: Address, block_number: BlockNumberType = "latest") -> int:
        """
        `eth_getBalance`: Returns the balance of the account of given address.
        """
        block = hex(block_number) if isinstance(block_number, int) else block_number
        params = [f"{address}", block]
        response = self.post_request(method="getBalance", params=params)
        return int(response, 16)

    def get_code(self, address: Address, block_number: BlockNumberType = "latest") -> Bytes:
        """`eth_getCode`: Returns code at a given address."""
        block = hex(block_number) if isinstance(block_number, int) else block_number
        params = [f"{address}", block]
        response = self.post_request(method="getCode", params=params)
        return Bytes(response)

    def get_transaction_count(
        self, address: Address, block_number: BlockNumberType = "latest"
    ) -> int:
        """
        `eth_getTransactionCount`: Returns the number of transactions sent from
        an address.
        """
        block = hex(block_number) if isinstance(block_number, int) else block_number
        params = [f"{address}", block]
        response = self.post_request(method="getTransactionCount", params=params)
        return int(response, 16)

    def get_transaction_by_hash(self, transaction_hash: Hash) -> TransactionByHashResponse | None:
        """
        `eth_getTransactionByHash`: Returns transaction details.

        Returns `None` when the client does not know the transaction.
        """
        try:
            response = self.post_request(
                method="getTransactionByHash", params=[f"{transaction_hash}"]
            )
            if response is None:
                return None
            return TransactionByHashResponse.model_validate(
                response, context=self.response_validation_context
            )
        except ValidationError as e:
            pprint(e.errors())
            raise e

    def get_transaction_receipt(self, transaction_hash: Hash) -> dict[str, Any] | None:
        """
        `eth_getTransactionReceipt`: Returns transaction receipt.

        Used to get the actual gas used by a transaction for gas validation
        in benchmark tests.
        """
        response = self.post_request(
            method="getTransactionReceipt", params=[f"{transaction_hash}"]
        )
        return response

    def get_storage_at(
        self, address: Address, position: Hash, block_number: BlockNumberType = "latest"
    ) -> Hash:
        """
        `eth_getStorageAt`: Returns the value from a storage position at a
        given address.
        """
        block = hex(block_number) if isinstance(block_number, int) else block_number
        params = [f"{address}", f"{position}", block]
        response = self.post_request(method="getStorageAt", params=params)
        return Hash(response)

    def gas_price(self) -> int:
        """`eth_gasPrice`: Returns the current gas price (hex quantity)."""
        response = self.post_request(method="gasPrice")
        return int(response, 16)

    def send_raw_transaction(
        self, transaction_rlp: Bytes, request_id: int | str | None = None
    ) -> Hash:
        """
        `eth_sendRawTransaction`: Send a transaction to the client.

        Raises:
            SendTransactionExceptionError: Wrapping any failure, with the
                offending RLP attached for context.
        """
        try:
            response = self.post_request(
                method="sendRawTransaction",
                params=[transaction_rlp.hex()],
                request_id=request_id,
            )
            result_hash = Hash(response)
            assert result_hash is not None
            return result_hash
        except Exception as e:
            raise SendTransactionExceptionError(str(e), tx_rlp=transaction_rlp) from e

    def send_transaction(self, transaction: Transaction) -> Hash:
        """
        `eth_sendRawTransaction`: Send a transaction to the client.

        Not a copy-paste error: the transaction is RLP-serialized locally, so
        this intentionally uses the same raw endpoint as
        `send_raw_transaction`; the transaction's metadata string is used as
        the JSON-RPC request id.
        """
        try:
            response = self.post_request(
                method="sendRawTransaction",
                params=[transaction.rlp().hex()],
                request_id=transaction.metadata_string(),
            )
            result_hash = Hash(response)
            # Sanity check: the hash reported by the client must match the
            # locally computed transaction hash.
            assert result_hash == transaction.hash
            assert result_hash is not None
            return transaction.hash
        except Exception as e:
            raise SendTransactionExceptionError(str(e), tx=transaction) from e

    def send_transactions(self, transactions: List[Transaction]) -> List[Hash]:
        """
        Use `eth_sendRawTransaction` to send a list of transactions to the
        client.
        """
        return [self.send_transaction(tx) for tx in transactions]

    def storage_at_keys(
        self, account: Address, keys: List[Hash], block_number: BlockNumberType = "latest"
    ) -> Dict[Hash, Hash]:
        """
        Retrieve the storage values for the specified keys at a given address
        and block number.
        """
        results: Dict[Hash, Hash] = {}
        for key in keys:
            storage_value = self.get_storage_at(account, key, block_number)
            results[key] = storage_value
        return results

    def wait_for_transaction(self, transaction: Transaction) -> TransactionByHashResponse:
        """
        Use `eth_getTransactionByHash` to wait until a transaction is included
        in a block.

        Polls every `poll_interval` seconds; raises after
        `transaction_wait_timeout` seconds without inclusion.
        """
        tx_hash = transaction.hash
        start_time = time.time()
        while True:
            tx = self.get_transaction_by_hash(tx_hash)
            # A non-None block number signals inclusion.
            if tx is not None and tx.block_number is not None:
                return tx
            if (time.time() - start_time) > self.transaction_wait_timeout:
                break
            time.sleep(self.poll_interval)
        raise Exception(
            f"Transaction {tx_hash} ({transaction.model_dump_json()}) not included in a "
            f"block after {self.transaction_wait_timeout} seconds"
        )

    def wait_for_transactions(
        self, transactions: List[Transaction]
    ) -> List[TransactionByHashResponse]:
        """
        Use `eth_getTransactionByHash` to wait until all transactions in list
        are included in a block.

        Raises after `transaction_wait_timeout` seconds, listing the
        transactions that were never included.
        """
        tx_hashes = [tx.hash for tx in transactions]
        responses: List[TransactionByHashResponse] = []
        start_time = time.time()
        while True:
            i = 0
            # Drain the pending-hash list in place as inclusions are found;
            # only advance `i` when the current hash is still pending.
            while i < len(tx_hashes):
                tx_hash = tx_hashes[i]
                tx = self.get_transaction_by_hash(tx_hash)
                if tx is not None and tx.block_number is not None:
                    responses.append(tx)
                    tx_hashes.pop(i)
                else:
                    i += 1
            if not tx_hashes:
                return responses
            if (time.time() - start_time) > self.transaction_wait_timeout:
                break
            time.sleep(self.poll_interval)
        missing_txs_strings = [
            f"{tx.hash} ({tx.model_dump_json()})" for tx in transactions if tx.hash in tx_hashes
        ]
        raise Exception(
            f"Transactions {', '.join(missing_txs_strings)} not included in a block "
            f"after {self.transaction_wait_timeout} seconds"
        )

    def send_wait_transaction(self, transaction: Transaction) -> Any:
        """Send transaction and waits until it is included in a block."""
        self.send_transaction(transaction)
        return self.wait_for_transaction(transaction)

    def send_wait_transactions(self, transactions: List[Transaction]) -> List[Any]:
        """
        Send list of transactions and waits until all of them are included in a
        block.
        """
        self.send_transactions(transactions)
        return self.wait_for_transactions(transactions)
class DebugRPC(EthRPC):
    """
    Represents an `debug_X` RPC class for every default ethereum RPC method
    used within EEST based hive simulators.
    """

    def trace_call(self, tr: dict[str, str], block_number: str) -> Any | None:
        """`debug_traceCall`: Returns pre state required for transaction."""
        request_params = [tr, block_number, {"tracer": "prestateTracer"}]
        return self.post_request(method="traceCall", params=request_params)
class EngineRPC(BaseRPC):
    """
    Represents an Engine API RPC class for every Engine API method used within
    EEST based hive simulators.
    """

    # Shared secret used to sign the JWT bearer token for every request.
    jwt_secret: bytes

    def __init__(
        self,
        *args: Any,
        jwt_secret: bytes = b"secretsecretsecretsecretsecretse",  # Default secret used in hive
        **kwargs: Any,
    ) -> None:
        """Initialize Engine RPC class with the given JWT secret."""
        super().__init__(*args, **kwargs)
        self.jwt_secret = jwt_secret

    def post_request(
        self,
        *,
        method: str,
        params: Any | None = None,
        extra_headers: Dict[str, str] | None = None,
        request_id: int | str | None = None,
        timeout: int | None = None,
    ) -> Any:
        """
        Send JSON-RPC POST request to the client RPC server at port defined in
        the url.

        Adds an `Authorization: Bearer <jwt>` header signed with the JWT
        secret; a caller-supplied `Authorization` in `extra_headers` wins.
        """
        if extra_headers is None:
            extra_headers = {}
        # The token is freshly signed per request with the current time as
        # its `iat` (issued-at) claim.
        jwt_token = encode(
            {"iat": int(time.time())},
            self.jwt_secret,
            algorithm="HS256",
        )
        # Dict union: keys in `extra_headers` override the default header.
        extra_headers = {
            "Authorization": f"Bearer {jwt_token}",
        } | extra_headers
        return super().post_request(
            method=method,
            params=params,
            extra_headers=extra_headers,
            timeout=timeout,
            request_id=request_id,
        )

    def new_payload(self, *params: Any, version: int) -> PayloadStatus:
        """
        `engine_newPayloadVX`: Attempts to execute the given payload on an
        execution client.
        """
        method = f"newPayloadV{version}"
        # Serialize every positional argument to its JSON representation.
        params_list = [to_json(param) for param in params]
        return PayloadStatus.model_validate(
            self.post_request(method=method, params=params_list),
            context=self.response_validation_context,
        )

    def forkchoice_updated(
        self,
        forkchoice_state: ForkchoiceState,
        payload_attributes: PayloadAttributes | None = None,
        *,
        version: int,
    ) -> ForkchoiceUpdateResponse:
        """
        `engine_forkchoiceUpdatedVX`: Updates the forkchoice state of the
        execution client.
        """
        method = f"forkchoiceUpdatedV{version}"
        # Payload attributes are optional; an explicit JSON null is sent
        # when absent.
        if payload_attributes is None:
            params = [to_json(forkchoice_state), None]
        else:
            params = [to_json(forkchoice_state), to_json(payload_attributes)]
        return ForkchoiceUpdateResponse.model_validate(
            self.post_request(
                method=method,
                params=params,
            ),
            context=self.response_validation_context,
        )

    def get_payload(
        self,
        payload_id: Bytes,
        *,
        version: int,
    ) -> GetPayloadResponse:
        """
        `engine_getPayloadVX`: Retrieves a payload that was requested through
        `engine_forkchoiceUpdatedVX`.
        """
        method = f"getPayloadV{version}"
        return GetPayloadResponse.model_validate(
            self.post_request(
                method=method,
                params=[f"{payload_id}"],
            ),
            context=self.response_validation_context,
        )

    def get_blobs(
        self,
        versioned_hashes: List[Hash],
        *,
        version: int,
    ) -> GetBlobsResponse | None:
        """
        `engine_getBlobsVX`: Retrieves blobs from an execution layers tx pool.
        """
        method = f"getBlobsV{version}"
        params = [f"{h}" for h in versioned_hashes]
        # The endpoint takes a single list argument, hence the extra nesting.
        response = self.post_request(
            method=method,
            params=[params],
        )
        if response is None:  # for tests that request non-existing blobs
            logger.debug("get_blobs response received but it has value: None")
            return None
        return GetBlobsResponse.model_validate(
            response,
            context=self.response_validation_context,
        )
class NetRPC(BaseRPC):
    """Represents a net RPC class for network-related RPC calls."""

    def peer_count(self) -> int:
        """`net_peerCount`: Get the number of peers connected to the client."""
        hex_peer_count = self.post_request(method="peerCount")
        # The client returns a hex-encoded quantity string.
        return int(hex_peer_count, 16)
class AdminRPC(BaseRPC):
    """Represents an admin RPC class for administrative RPC calls."""

    def add_peer(self, enode: str) -> bool:
        """`admin_addPeer`: Add a peer by enode URL."""
        result: bool = self.post_request(method="addPeer", params=[enode])
        return result
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_rpc/__init__.py | src/ethereum_test_rpc/__init__.py | """
JSON-RPC methods and helper functions for EEST consume based hive simulators.
"""
from .rpc import (
AdminRPC,
BlockNumberType,
DebugRPC,
EngineRPC,
EthRPC,
NetRPC,
SendTransactionExceptionError,
)
from .rpc_types import (
BlobAndProofV1,
BlobAndProofV2,
EthConfigResponse,
ForkConfig,
ForkConfigBlobSchedule,
)
__all__ = [
"AdminRPC",
"BlobAndProofV1",
"BlobAndProofV2",
"BlockNumberType",
"DebugRPC",
"EngineRPC",
"EthConfigResponse",
"EthRPC",
"ForkConfig",
"ForkConfigBlobSchedule",
"NetRPC",
"SendTransactionExceptionError",
]
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_rpc/rpc_types.py | src/ethereum_test_rpc/rpc_types.py | """Types used in the RPC module for `eth` and `engine` namespaces' requests."""
import json
from binascii import crc32
from enum import Enum
from hashlib import sha256
from typing import Annotated, Any, Dict, List, Self
from pydantic import AliasChoices, Field, model_validator
from ethereum_test_base_types import (
Address,
Bytes,
CamelModel,
EthereumTestRootModel,
ForkBlobSchedule,
ForkHash,
Hash,
HexNumber,
)
from ethereum_test_exceptions import (
BlockException,
ExceptionMapperValidator,
ExceptionWithMessage,
TransactionException,
UndefinedException,
)
from ethereum_test_fixtures.blockchain import FixtureExecutionPayload
from ethereum_test_types import EOA, Transaction, Withdrawal
class JSONRPCError(Exception):
"""Model to parse a JSON RPC error response."""
code: int
message: str
data: str | None
def __init__(self, code: int | str, message: str, data: str | None = None) -> None:
"""Initialize the JSONRPCError."""
self.code = int(code)
self.message = message
self.data = data
def __str__(self) -> str:
"""Return string representation of the JSONRPCError."""
if self.data is not None:
return f"JSONRPCError(code={self.code}, message={self.message}, data={self.data})"
return f"JSONRPCError(code={self.code}, message={self.message})"
class TransactionByHashResponse(Transaction):
    """Represents the response of a transaction by hash request."""

    # Block context; `None` while the transaction is not yet included.
    block_hash: Hash | None = None
    block_number: HexNumber | None = None

    gas_limit: HexNumber = Field(HexNumber(21_000), alias="gas")
    transaction_hash: Hash = Field(..., alias="hash")
    sender: EOA | None = Field(None, alias="from")

    # The to field can have different names in different clients, so we use
    # AliasChoices.
    to: Address | None = Field(..., validation_alias=AliasChoices("to_address", "to", "toAddress"))
    # Some clients report the signature parity as "yParity" instead of "v".
    v: HexNumber = Field(0, validation_alias=AliasChoices("v", "yParity"))  # type: ignore

    @model_validator(mode="before")
    @classmethod
    def adapt_clients_response(cls, data: Any) -> Any:
        """
        Perform modifications necessary to adapt the response returned by
        clients so it can be parsed by our model.
        """
        if isinstance(data, dict):
            if "gasPrice" in data and "maxFeePerGas" in data:
                # Keep only one of the gas price fields.
                del data["gasPrice"]
        return data

    def model_post_init(self, __context: Any) -> None:
        """
        Check that the transaction hash returned by the client matches the one
        calculated by us.
        """
        Transaction.model_post_init(self, __context)
        assert self.transaction_hash == self.hash
class ForkchoiceState(CamelModel):
    """
    Represents the forkchoice state of the beacon chain.

    All hashes default to the zero hash (i.e. an unset forkchoice).
    """

    head_block_hash: Hash = Field(Hash(0))
    safe_block_hash: Hash = Field(Hash(0))
    finalized_block_hash: Hash = Field(Hash(0))
class PayloadStatusEnum(str, Enum):
    """
    Represents the status of a payload after execution.

    Values match the status strings returned by the Engine API.
    """

    VALID = "VALID"
    INVALID = "INVALID"
    SYNCING = "SYNCING"
    ACCEPTED = "ACCEPTED"
    INVALID_BLOCK_HASH = "INVALID_BLOCK_HASH"
class BlockTransactionExceptionWithMessage(
    # type: ignore below silences the generic-parameter complaint on the
    # BlockException | TransactionException union.
    ExceptionWithMessage[BlockException | TransactionException]  # type: ignore
):
    """Exception returned from the execution client with a message."""

    pass
class PayloadStatus(CamelModel):
    """Represents the status of a payload after execution."""

    status: PayloadStatusEnum
    latest_valid_hash: Hash | None
    # Parsed validation error; presumably falls back to `UndefinedException`
    # when the client message cannot be mapped — confirm against
    # ExceptionMapperValidator semantics.
    validation_error: (
        Annotated[
            BlockTransactionExceptionWithMessage | UndefinedException, ExceptionMapperValidator
        ]
        | None
    )
class ForkchoiceUpdateResponse(CamelModel):
    """Represents the response of a forkchoice update."""

    payload_status: PayloadStatus
    # Id to pass to `engine_getPayloadVX`; `None` when no build was started.
    payload_id: Bytes | None
class PayloadAttributes(CamelModel):
    """Represents the attributes of a payload."""

    timestamp: HexNumber
    prev_randao: Hash
    suggested_fee_recipient: Address
    # Optional fields; NOTE(review): presumably only present for newer
    # payload-attribute versions — confirm against the Engine API version used.
    withdrawals: List[Withdrawal] | None = None
    parent_beacon_block_root: Hash | None = None
class BlobsBundle(CamelModel):
    """Represents the bundle of blobs."""

    commitments: List[Bytes]
    proofs: List[Bytes]
    blobs: List[Bytes]

    def blob_versioned_hashes(self, versioned_hash_version: int = 1) -> List[Hash]:
        """
        Return versioned hashes of the blobs.

        Each hash is the SHA-256 of the commitment with its first byte
        replaced by the version byte.
        """
        version_prefix = bytes([versioned_hash_version])
        return [
            Hash(version_prefix + sha256(commitment).digest()[1:])
            for commitment in self.commitments
        ]
class BlobAndProofV1(CamelModel):
    """Represents a blob and single-proof structure (< Osaka)."""

    blob: Bytes
    # Single KZG proof covering the whole blob.
    proof: Bytes
class BlobAndProofV2(CamelModel):
    """Represents a blob and cell proof structure (>= Osaka)."""

    blob: Bytes
    # One proof per cell of the blob.
    proofs: List[Bytes]
class GetPayloadResponse(CamelModel):
    """Represents the response of a get payload request."""

    execution_payload: FixtureExecutionPayload
    # Optional sections; may be absent depending on the getPayload version.
    blobs_bundle: BlobsBundle | None = None
    execution_requests: List[Bytes] | None = None
class GetBlobsResponse(EthereumTestRootModel[List[BlobAndProofV1 | BlobAndProofV2 | None]]):
    """Represents the response of a get blobs request."""

    # Presumably a `None` entry corresponds to a requested versioned hash
    # not found in the client's pool — confirm against engine_getBlobs spec.
    root: List[BlobAndProofV1 | BlobAndProofV2 | None]

    def __len__(self) -> int:
        """Return the number of blobs in the response."""
        return len(self.root)

    def __getitem__(self, index: int) -> BlobAndProofV1 | BlobAndProofV2 | None:
        """Return the blob at the given index."""
        return self.root[index]
class ForkConfigBlobSchedule(CamelModel):
    """Representation of the blob schedule of a given fork."""

    # Serialized as short names ("target"/"max") in the RPC response.
    target_blobs_per_block: int = Field(..., alias="target")
    max_blobs_per_block: int = Field(..., alias="max")
    base_fee_update_fraction: int

    @classmethod
    def from_fork_blob_schedule(cls, fork_blob_schedule: ForkBlobSchedule) -> Self:
        """Create a ForkConfigBlobSchedule from a ForkBlobSchedule."""
        return cls(
            target_blobs_per_block=fork_blob_schedule.target_blobs_per_block,
            max_blobs_per_block=fork_blob_schedule.max_blobs_per_block,
            base_fee_update_fraction=fork_blob_schedule.base_fee_update_fraction,
        )
class ForkConfig(CamelModel):
    """Current or next fork config information."""

    activation_time: int
    blob_schedule: ForkConfigBlobSchedule | None = None
    chain_id: HexNumber
    fork_id: ForkHash
    precompiles: Dict[str, Address]
    system_contracts: Dict[str, Address]

    def get_hash(self) -> ForkHash:
        """
        Return the hash of the fork config.

        Computed as the CRC32 of the canonical JSON serialization
        (sorted keys, compact separators, camelCase aliases, `None`
        fields excluded), so it is deterministic for equal configs.
        """
        obj = self.model_dump(mode="json", by_alias=True, exclude_none=True)
        return ForkHash(crc32(json.dumps(obj, sort_keys=True, separators=(",", ":")).encode()))
class EthConfigResponse(CamelModel):
    """Response of the `eth_config` RPC endpoint."""

    # Fork configuration currently active on the client.
    current: ForkConfig
    # Upcoming/last scheduled fork configs, when the client reports them.
    next: ForkConfig | None = None
    last: ForkConfig | None = None
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_rpc/tests/test_types.py | src/ethereum_test_rpc/tests/test_types.py | """Test the types in the `ethereum_test_rpc` package."""
from typing import Any, Dict
import pytest
from ethereum_test_rpc import EthConfigResponse
eth_config_dict: Dict[str, Any] = {
"current": {
"activationTime": 0,
"blobSchedule": {"baseFeeUpdateFraction": 3338477, "max": 6, "target": 3},
"chainId": "0x88bb0",
"forkId": "0xbef71d30",
"precompiles": {
"BLAKE2F": "0x0000000000000000000000000000000000000009",
"BN254_ADD": "0x0000000000000000000000000000000000000006",
"BN254_MUL": "0x0000000000000000000000000000000000000007",
"BN254_PAIRING": "0x0000000000000000000000000000000000000008",
"ECREC": "0x0000000000000000000000000000000000000001",
"ID": "0x0000000000000000000000000000000000000004",
"KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a",
"MODEXP": "0x0000000000000000000000000000000000000005",
"RIPEMD160": "0x0000000000000000000000000000000000000003",
"SHA256": "0x0000000000000000000000000000000000000002",
},
"systemContracts": {"BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02"},
},
"next": {
"activationTime": 1742999832,
"blobSchedule": {"baseFeeUpdateFraction": 5007716, "max": 9, "target": 6},
"chainId": "0x88bb0",
"forkId": "0x0929e24e",
"precompiles": {
"BLAKE2F": "0x0000000000000000000000000000000000000009",
"BLS12_G1ADD": "0x000000000000000000000000000000000000000b",
"BLS12_G1MSM": "0x000000000000000000000000000000000000000c",
"BLS12_G2ADD": "0x000000000000000000000000000000000000000d",
"BLS12_G2MSM": "0x000000000000000000000000000000000000000e",
"BLS12_MAP_FP2_TO_G2": "0x0000000000000000000000000000000000000011",
"BLS12_MAP_FP_TO_G1": "0x0000000000000000000000000000000000000010",
"BLS12_PAIRING_CHECK": "0x000000000000000000000000000000000000000f",
"BN254_ADD": "0x0000000000000000000000000000000000000006",
"BN254_MUL": "0x0000000000000000000000000000000000000007",
"BN254_PAIRING": "0x0000000000000000000000000000000000000008",
"ECREC": "0x0000000000000000000000000000000000000001",
"ID": "0x0000000000000000000000000000000000000004",
"KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a",
"MODEXP": "0x0000000000000000000000000000000000000005",
"RIPEMD160": "0x0000000000000000000000000000000000000003",
"SHA256": "0x0000000000000000000000000000000000000002",
},
"systemContracts": {
"BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02",
"CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": (
"0x0000bbddc7ce488642fb579f8b00f3a590007251"
),
"DEPOSIT_CONTRACT_ADDRESS": ("0x00000000219ab540356cbb839cbe05303d7705fa"),
"HISTORY_STORAGE_ADDRESS": ("0x0000f90827f1c53a10cb7a02335b175320002935"),
"WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": ("0x00000961ef480eb55e80d19ad83579a64c007002"),
},
},
"last": {
"activationTime": 1742999832,
"blobSchedule": {"baseFeeUpdateFraction": 5007716, "max": 9, "target": 6},
"chainId": "0x88bb0",
"forkId": "0x0929e24e",
"precompiles": {
"BLAKE2F": "0x0000000000000000000000000000000000000009",
"BLS12_G1ADD": "0x000000000000000000000000000000000000000b",
"BLS12_G1MSM": "0x000000000000000000000000000000000000000c",
"BLS12_G2ADD": "0x000000000000000000000000000000000000000d",
"BLS12_G2MSM": "0x000000000000000000000000000000000000000e",
"BLS12_MAP_FP2_TO_G2": "0x0000000000000000000000000000000000000011",
"BLS12_MAP_FP_TO_G1": "0x0000000000000000000000000000000000000010",
"BLS12_PAIRING_CHECK": "0x000000000000000000000000000000000000000f",
"BN254_ADD": "0x0000000000000000000000000000000000000006",
"BN254_MUL": "0x0000000000000000000000000000000000000007",
"BN254_PAIRING": "0x0000000000000000000000000000000000000008",
"ECREC": "0x0000000000000000000000000000000000000001",
"ID": "0x0000000000000000000000000000000000000004",
"KZG_POINT_EVALUATION": "0x000000000000000000000000000000000000000a",
"MODEXP": "0x0000000000000000000000000000000000000005",
"RIPEMD160": "0x0000000000000000000000000000000000000003",
"SHA256": "0x0000000000000000000000000000000000000002",
},
"systemContracts": {
"BEACON_ROOTS_ADDRESS": "0x000f3df6d732807ef1319fb7b8bb8522d0beac02",
"CONSOLIDATION_REQUEST_PREDEPLOY_ADDRESS": (
"0x0000bbddc7ce488642fb579f8b00f3a590007251"
),
"DEPOSIT_CONTRACT_ADDRESS": ("0x00000000219ab540356cbb839cbe05303d7705fa"),
"HISTORY_STORAGE_ADDRESS": ("0x0000f90827f1c53a10cb7a02335b175320002935"),
"WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS": ("0x00000961ef480eb55e80d19ad83579a64c007002"),
},
},
}
@pytest.fixture
def eth_config_response() -> EthConfigResponse:
    """
    Get the `eth_config` response from the client to be verified by all tests.

    Validating here also exercises the pydantic parsing of the static
    `eth_config_dict` defined above.
    """
    return EthConfigResponse.model_validate(eth_config_dict)
def test_fork_config_get_hash(eth_config_response: EthConfigResponse) -> None:
    """
    Validate every fork config section of the `eth_config` response and the
    `get_hash` method of the `ForkConfig` class.
    """
    # Iterate through each fork config and validate
    for config_name in ("current", "next", "last"):
        config = getattr(eth_config_response, config_name)
        expected = eth_config_dict[config_name]
        if config is None:
            assert expected is None
            continue
        # Exercise `get_hash` (the method this test is named after, which was
        # previously never called): it must be callable and deterministic.
        assert config.get_hash() == config.get_hash()
        # Top-level fields
        assert config.activation_time == expected["activationTime"]
        assert str(config.chain_id) == expected["chainId"]
        assert str(config.fork_id) == expected["forkId"]
        # Precompiles
        assert set(config.precompiles.keys()) == set(expected["precompiles"].keys())
        for k, v in expected["precompiles"].items():
            assert config.precompiles[k] == v
        # System contracts
        assert set(config.system_contracts.keys()) == set(expected["systemContracts"].keys())
        for k, v in expected["systemContracts"].items():
            assert config.system_contracts[k] == v
        # Blob schedule
        if expected.get("blobSchedule") is not None:
            assert config.blob_schedule is not None
            assert (
                config.blob_schedule.target_blobs_per_block == expected["blobSchedule"]["target"]
            )
            assert config.blob_schedule.max_blobs_per_block == expected["blobSchedule"]["max"]
            assert (
                config.blob_schedule.base_fee_update_fraction
                == expected["blobSchedule"]["baseFeeUpdateFraction"]
            )
        else:
            assert config.blob_schedule is None
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_test_rpc/tests/__init__.py | src/ethereum_test_rpc/tests/__init__.py | """Unit tests for the `ethereum_test_rpc` package."""
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/file_utils.py | src/ethereum_clis/file_utils.py | """Methods to work with the filesystem and json."""
import os
import stat
from json import dump
from typing import Any, Dict
from pydantic import BaseModel, RootModel
def write_json_file(data: Dict[str, Any], file_path: str) -> None:
    """
    Write `data` as pretty-printed JSON to the given path.

    The file is opened with an explicit UTF-8 encoding: since
    `ensure_ascii=False` emits non-ASCII characters verbatim, relying on the
    platform's locale encoding could corrupt the output (or raise) on
    non-UTF-8 systems.
    """
    with open(file_path, "w", encoding="utf-8") as f:
        dump(data, f, ensure_ascii=False, indent=4)
def dump_files_to_directory(output_path: str, files: Dict[str, Any]) -> None:
    """
    Dump the files to the given directory.

    Keys of `files` are relative paths, optionally suffixed with `+<flags>`
    (e.g. "script.sh+x"); only the "x" flag (mark executable) has an effect.
    Values may be pydantic models (serialized as JSON), strings (written
    verbatim), or any JSON-serializable object.
    """
    os.makedirs(output_path, exist_ok=True)
    for file_rel_path_flags, file_contents in files.items():
        # Split only on the LAST "+": a plain `split("+")` raised ValueError
        # during unpacking for file names containing more than one "+".
        file_rel_path, flags = (
            file_rel_path_flags.rsplit("+", 1)
            if "+" in file_rel_path_flags
            else (file_rel_path_flags, "")
        )
        rel_path = os.path.dirname(file_rel_path)
        if rel_path:
            os.makedirs(os.path.join(output_path, rel_path), exist_ok=True)
        file_path = os.path.join(output_path, file_rel_path)
        # Explicit UTF-8 so output does not depend on the locale encoding.
        with open(file_path, "w", encoding="utf-8") as f:
            if isinstance(file_contents, str):
                f.write(file_contents)
            elif isinstance(file_contents, (BaseModel, RootModel)):
                f.write(
                    file_contents.model_dump_json(
                        indent=4,
                        exclude_none=True,
                        by_alias=True,
                    )
                )
            else:
                dump(file_contents, f, ensure_ascii=True, indent=4)
        if flags:
            file_mode = os.stat(file_path).st_mode
            if "x" in flags:
                file_mode |= stat.S_IEXEC
            os.chmod(file_path, file_mode)
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/cli_types.py | src/ethereum_clis/cli_types.py | """Types used in the transition tool interactions."""
import json
from pathlib import Path
from typing import Annotated, Any, Dict, List, Self
from pydantic import Field, PlainSerializer, PlainValidator
from ethereum_test_base_types import (
Bloom,
Bytes,
CamelModel,
EthereumTestRootModel,
Hash,
HexNumber,
)
from ethereum_test_base_types.composite_types import ForkBlobSchedule
from ethereum_test_exceptions import (
BlockException,
ExceptionMapperValidator,
ExceptionWithMessage,
TransactionException,
UndefinedException,
)
from ethereum_test_types import (
Alloc,
BlockAccessList,
Environment,
Transaction,
TransactionReceipt,
)
from ethereum_test_vm import Opcode, Opcodes
from pytest_plugins.custom_logging import get_logger
logger = get_logger(__name__)
class TransactionExceptionWithMessage(ExceptionWithMessage[TransactionException]):
    """Transaction exception with message, as reported by the tool."""

    pass
class BlockExceptionWithMessage(ExceptionWithMessage[BlockException]):
    """Block exception with message, as reported by the tool."""

    pass
class RejectedTransaction(CamelModel):
    """Rejected transaction."""

    # Index of the rejected transaction in the submitted list.
    index: HexNumber
    # Parsed rejection reason; presumably `UndefinedException` when the tool
    # message cannot be mapped — confirm against ExceptionMapperValidator.
    error: Annotated[
        TransactionExceptionWithMessage | UndefinedException, ExceptionMapperValidator
    ]
class TraceLine(CamelModel):
    """Single trace line contained in the traces output."""

    pc: int  # program counter
    op: int  # opcode byte value
    gas: HexNumber  # gas remaining before the operation
    gas_cost: HexNumber | None = None
    mem_size: int
    stack: List[HexNumber | None]
    depth: int  # call depth
    refund: int
    op_name: str  # mnemonic for `op`
    error: str | None = None

    def are_equivalent(self, other: Self) -> bool:
        """
        Return True if the only difference is the gas counter.

        Compares all fields except `gas`/`gas_cost`, which may legitimately
        differ between implementations being compared.
        """
        self_dict = self.model_dump(mode="python", exclude={"gas", "gas_cost"})
        other_dict = other.model_dump(mode="python", exclude={"gas", "gas_cost"})
        if self_dict != other_dict:
            logger.debug(f"Trace lines are not equivalent: {self_dict} != {other_dict}.")
            return False
        return True
class TransactionTraces(CamelModel):
    """Traces of a single transaction."""

    traces: List[TraceLine]
    output: str | None = None
    gas_used: HexNumber | None = None

    @classmethod
    def from_file(cls, trace_file_path: Path) -> Self:
        """
        Read a single transaction's traces from a .jsonl file.

        The last line may be a summary object carrying `output`/`gasUsed`;
        every other line is parsed as an individual `TraceLine`.
        """
        trace_lines = trace_file_path.read_text().splitlines()
        trace_dict: Dict[str, Any] = {}
        # Guard against an empty trace file before peeking at the last line
        # (previously raised IndexError on `trace_lines[-1]`).
        if trace_lines and "gasUsed" in trace_lines[-1] and "output" in trace_lines[-1]:
            trace_dict |= json.loads(trace_lines.pop())
        trace_dict["traces"] = [TraceLine.model_validate_json(line) for line in trace_lines]
        return cls.model_validate(trace_dict)

    @staticmethod
    def remove_gas(traces: List[TraceLine]) -> None:
        """
        Remove the GAS operation opcode result from the stack to make
        comparison possible even if the gas has been pushed to the stack.
        """
        for i in range(1, len(traces)):
            trace = traces[i]
            previous_trace = traces[i - 1]
            if previous_trace.op_name == "GAS" and trace.depth == previous_trace.depth:
                # Remove the result of calling `Op.GAS` from the stack.
                trace.stack[-1] = None

    def are_equivalent(self, other: Self, enable_post_processing: bool) -> bool:
        """Return True if the only difference is the gas counter."""
        if len(self.traces) != len(other.traces):
            logger.debug(
                f"Traces have different lengths: {len(self.traces)} != {len(other.traces)}."
            )
            return False
        if self.output != other.output:
            logger.debug(f"Traces have different outputs: {self.output} != {other.output}.")
            return False
        # With post-processing enabled, gas usage is expected to diverge, so
        # it is only compared in strict mode.
        if self.gas_used != other.gas_used and not enable_post_processing:
            logger.debug(f"Traces have different gas used: {self.gas_used} != {other.gas_used}.")
            return False
        own_traces = self.traces.copy()
        other_traces = other.traces.copy()
        if enable_post_processing:
            logger.debug("Removing gas from traces (enable_post_processing=True).")
            TransactionTraces.remove_gas(own_traces)
            TransactionTraces.remove_gas(other_traces)
        for i in range(len(self.traces)):
            if not own_traces[i].are_equivalent(other_traces[i]):
                logger.debug(f"Trace line {i} is not equivalent.")
                return False
        return True

    def print(self) -> None:
        """Print the traces in a readable format."""
        for exec_step, trace in enumerate(self.traces):
            print(f"Step {exec_step}:")
            print(trace.model_dump_json(indent=2))
            print()
class Traces(EthereumTestRootModel):
    """
    Traces returned from the transition tool for all transactions executed.
    """

    root: List[TransactionTraces]

    def append(self, item: TransactionTraces) -> None:
        """Append the transaction traces to the current list."""
        self.root.append(item)

    def are_equivalent(self, other: Self | None, enable_post_processing: bool) -> bool:
        """Return True if the only difference is the gas counter."""
        if other is None:
            return False
        if len(self.root) != len(other.root):
            return False
        for index, (own_tx, other_tx) in enumerate(zip(self.root, other.root)):
            if not own_tx.are_equivalent(other_tx, enable_post_processing):
                logger.debug(f"Trace file {index} is not equivalent.")
                return False
            logger.debug(f"Trace file {index} is equivalent.")
        logger.debug("All traces are equivalent.")
        return True

    def print(self) -> None:
        """Print the traces in a readable format."""
        for tx_number, tx in enumerate(self.root):
            print(f"Transaction {tx_number}:")
            tx.print()
# Map alternative opcode mnemonics emitted by some tools to the canonical
# name used by the `Opcodes` members (matched via `str(op)` below).
_opcode_synonyms = {
    "KECCAK256": "SHA3",
}
class UndefinedOpcode(HexNumber):
    """Undefined opcode."""

    # Kept as the raw hex value, used when no known mnemonic matches
    # (see `validate_opcode`).
    pass
def validate_opcode(obj: Any) -> Opcodes | Opcode | UndefinedOpcode:
    """Validate an opcode from a string."""
    # Already-validated values pass through untouched.
    if isinstance(obj, (Opcode, Opcodes, UndefinedOpcode)):
        return obj
    if isinstance(obj, str):
        # Raw hex values have no mnemonic and stay undefined.
        if obj.startswith("0x"):
            return UndefinedOpcode(obj)
        # Normalize alternative mnemonics before looking up the member.
        if obj in _opcode_synonyms:
            obj = _opcode_synonyms[obj]
        for candidate in Opcodes:
            if str(candidate) == obj:
                return candidate
    raise Exception(f"Unable to validate {obj} (type={type(obj)})")
class OpcodeCount(EthereumTestRootModel):
    """Opcode count returned from the evm tool."""

    root: Dict[
        Annotated[
            Opcodes | UndefinedOpcode,
            PlainValidator(validate_opcode),
            PlainSerializer(lambda o: str(o)),
        ],
        int,
    ]

    def __add__(self, other: Self) -> Self:
        """
        Add two instances of opcode count dictionaries.

        Merges directly on the validated `root` dictionaries instead of on
        `model_dump()` output: the previous implementation produced an
        intermediate dict mixing serialized string keys with opcode-object
        keys and relied on re-validation insertion order to resolve the
        resulting duplicates.
        """
        assert isinstance(other, OpcodeCount), f"Incompatible type {type(other)}"
        merged = dict(self.root)
        for opcode, count in other.root.items():
            merged[opcode] = merged.get(opcode, 0) + count
        return self.__class__(merged)
class Result(CamelModel):
    """Result of a transition tool output."""

    # Post-state and header roots/hashes reported by the tool.
    state_root: Hash
    ommers_hash: Hash | None = Field(None, validation_alias="sha3Uncles")
    transactions_trie: Hash = Field(..., alias="txRoot")
    receipts_root: Hash
    logs_hash: Hash
    logs_bloom: Bloom
    # Per-transaction outcomes.
    receipts: List[TransactionReceipt]
    rejected_transactions: List[RejectedTransaction] = Field(
        default_factory=list, alias="rejected"
    )
    difficulty: HexNumber | None = Field(None, alias="currentDifficulty")
    gas_used: HexNumber
    base_fee_per_gas: HexNumber | None = Field(None, alias="currentBaseFee")
    withdrawals_root: Hash | None = None
    # Optional blob-gas accounting fields (absent when the tool/fork does
    # not report them).
    excess_blob_gas: HexNumber | None = Field(None, alias="currentExcessBlobGas")
    blob_gas_used: HexNumber | None = None
    requests_hash: Hash | None = None
    requests: List[Bytes] | None = None
    block_access_list: BlockAccessList | None = None
    block_access_list_hash: Hash | None = None
    # Block-level exception mapped through the tool's exception mapper.
    block_exception: Annotated[
        BlockExceptionWithMessage | UndefinedException | None, ExceptionMapperValidator
    ] = None
    # Populated by the framework after evaluation, not by the tool's JSON.
    traces: Traces | None = None
    opcode_count: OpcodeCount | None = None
class TransitionToolInput(CamelModel):
    """Transition tool input."""

    # Pre-state allocation.
    alloc: Alloc
    # Transactions to apply on top of `alloc` under `env`.
    txs: List[Transaction]
    env: Environment
    # Explicit blob schedule, only set for tools that accept it.
    blob_params: ForkBlobSchedule | None = None
class TransitionToolOutput(CamelModel):
    """Transition tool output."""

    # Post-state allocation.
    alloc: Alloc
    result: Result
    # RLP-encoded transactions body, when provided by the tool.
    body: Bytes | None = None
class TransitionToolContext(CamelModel):
    """Transition tool context."""

    fork: str
    # Serialized as "chainid" for the server API.
    chain_id: int = Field(..., alias="chainid")
    reward: int
class TransitionToolRequest(CamelModel):
    """Transition tool server request data."""

    # Execution context (fork, chain id, block reward).
    state: TransitionToolContext
    input: TransitionToolInput
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/transition_tool.py | src/ethereum_clis/transition_tool.py | """Transition tool abstract class."""
import json
import os
import shutil
import subprocess
import tempfile
import textwrap
import time
from abc import abstractmethod
from dataclasses import dataclass
from pathlib import Path
from typing import Any, ClassVar, Dict, List, LiteralString, Mapping, Optional, Type
from urllib.parse import urlencode
from requests import Response
from requests.exceptions import ConnectionError as RequestsConnectionError
from requests.exceptions import ReadTimeout
from requests_unixsocket import Session # type: ignore
from ethereum_test_base_types import BlobSchedule
from ethereum_test_base_types.composite_types import ForkBlobSchedule
from ethereum_test_exceptions import ExceptionMapper
from ethereum_test_forks import Fork
from ethereum_test_forks.helpers import get_development_forks, get_forks
from ethereum_test_types import Alloc, Environment, Transaction
from .cli_types import (
OpcodeCount,
Traces,
TransactionReceipt,
TransactionTraces,
TransitionToolContext,
TransitionToolInput,
TransitionToolOutput,
TransitionToolRequest,
)
from .ethereum_cli import EthereumCLI
from .file_utils import dump_files_to_directory, write_json_file
# Keyword arguments applied to every pydantic `model_dump` sent to the tools.
model_dump_config: Mapping = {"by_alias": True, "exclude_none": True}
# TODO: reduce NORMAL_SERVER_TIMEOUT back down to 20 once BLS timeout issue is
# resolved: https://github.com/ethereum/execution-spec-tests/issues/1894
NORMAL_SERVER_TIMEOUT = 600
# Timeout (seconds) for requests explicitly flagged as slow by the caller.
SLOW_REQUEST_TIMEOUT = 600
def get_valid_transition_tool_names() -> set[str]:
    """
    Get all valid transition tool names from deployed and development forks.
    """
    names: set[str] = set()
    for fork in (*get_forks(), *get_development_forks()):
        names.add(fork.transition_tool_name())
    return names
class TransitionTool(EthereumCLI):
    """
    Transition tool abstract base class which should be inherited by all
    transition tool implementations.
    """

    # Traces accumulated across evaluations (None until tracing collects).
    traces: List[Traces] | None = None

    registered_tools: List[Type["TransitionTool"]] = []
    default_tool: Optional[Type["TransitionTool"]] = None
    exception_mapper: ExceptionMapper
    # Optional CLI subcommand inserted after the binary name (stream mode).
    subcommand: Optional[str] = None
    cached_version: Optional[str] = None
    # Interaction mode flags; `evaluate()` dispatches on these, defaulting
    # to filesystem interaction when both are False.
    t8n_use_stream: bool = False
    t8n_use_server: bool = False
    server_url: str | None = None
    process: Optional[subprocess.Popen] = None
    # Capability flags, overridden by subclasses.
    supports_opcode_count: ClassVar[bool] = False
    supports_xdist: ClassVar[bool] = True
    supports_blob_params: ClassVar[bool] = False
    @abstractmethod
    def __init__(
        self,
        *,
        exception_mapper: Optional[ExceptionMapper] = None,
        binary: Optional[Path] = None,
        trace: bool = False,
    ):
        """
        Abstract initialization method that all subclasses must implement.

        Args:
            exception_mapper: Maps client error strings to framework
                exceptions. Required in practice despite the `None`
                default (asserted below).
            binary: Path to the tool binary; resolution is delegated to
                `EthereumCLI.__init__`.
            trace: When True, EVM traces are requested and collected.

        """
        assert exception_mapper is not None
        self.exception_mapper = exception_mapper
        super().__init__(binary=binary)
        self.trace = trace
        self._info_metadata: Optional[Dict[str, Any]] = {}
    def __init_subclass__(cls) -> None:
        """Register all subclasses of TransitionTool as possible tools."""
        # Registration makes the subclass visible to binary auto-detection
        # (`EthereumCLI.from_binary_path`).
        TransitionTool.register_tool(cls)
    @abstractmethod
    def is_fork_supported(self, fork: Fork) -> bool:
        """
        Return True if the fork is supported by the tool.

        Subclasses must implement this check.
        """
        pass
    def start_server(self) -> None:
        """
        Start the t8n-server process, extract the port, and leave it
        running for future reuse.

        Base implementation is a no-op; server-based tools override it.
        """
        pass
    def shutdown(self) -> None:
        """
        Perform any cleanup tasks related to the tested tool.

        Base implementation is a no-op; server-based tools override it.
        """
        pass
    def reset_traces(self) -> None:
        """Reset the internal trace storage so a new test starts clean."""
        self.traces = None
def append_traces(self, new_traces: Traces) -> None:
"""
Append a list of traces of a state transition to the current
list.
"""
if self.traces is None:
self.traces = []
self.traces.append(new_traces)
    def get_traces(self) -> List[Traces] | None:
        """Return the accumulated traces (None if tracing never ran)."""
        return self.traces
    def collect_traces(
        self,
        receipts: List[TransactionReceipt],
        temp_dir: tempfile.TemporaryDirectory,
        debug_output_path: str = "",
    ) -> Traces:
        """
        Collect the traces from the t8n tool output and store them in the
        traces list.

        Args:
            receipts: Receipts of the executed transactions; used to derive
                the per-transaction trace file names.
            temp_dir: Directory where the tool wrote its trace files.
            debug_output_path: When set, each trace file is also copied
                there for debugging.

        Returns:
            Traces parsed from the `trace-<index>-<tx hash>.jsonl` files.

        """
        traces: Traces = Traces(root=[])
        temp_dir_path = Path(temp_dir.name)
        for i, r in enumerate(receipts):
            trace_file_name = f"trace-{i}-{r.transaction_hash}.jsonl"
            trace_file_path = temp_dir_path / trace_file_name
            if debug_output_path:
                shutil.copy(
                    trace_file_path,
                    Path(debug_output_path) / trace_file_name,
                )
            traces.append(TransactionTraces.from_file(trace_file_path))
        self.append_traces(traces)
        return traces
@dataclass
class TransitionToolData:
"""Transition tool files and data to pass between methods."""
alloc: Alloc
txs: List[Transaction]
env: Environment
fork: Fork
chain_id: int
reward: int
blob_schedule: BlobSchedule | None
state_test: bool = False
@property
def fork_name(self) -> str:
"""Return the fork name."""
return self.fork.transition_tool_name(
block_number=self.env.number,
timestamp=self.env.timestamp,
)
@property
def fork_name_if_supports_blob_params(self) -> str:
"""Return the fork name."""
fork = self.fork.fork_at(
block_number=self.env.number,
timestamp=self.env.timestamp,
)
# For tools that support blob_params, return base fork for BPO
# forks.
if fork.bpo_fork():
return fork.non_bpo_ancestor().transition_tool_name()
else:
return self.fork.transition_tool_name(
block_number=self.env.number,
timestamp=self.env.timestamp,
)
@property
def blob_params(self) -> ForkBlobSchedule | None:
"""Return the blob parameters for the current fork."""
if self.blob_schedule:
fork_name = self.fork.fork_at(
block_number=self.env.number, timestamp=self.env.timestamp
).name()
# Only return blob params if this fork has them
if fork_name in self.blob_schedule.root:
return self.blob_schedule[fork_name]
return None
def __post_init__(self) -> None:
"""Modify the reward if the environment number is 0."""
if self.env.number == 0:
self.reward = -1
def to_input(self) -> TransitionToolInput:
"""Convert the data to a TransactionToolInput object."""
return TransitionToolInput(
alloc=self.alloc,
txs=self.txs,
env=self.env,
blob_params=self.blob_params,
)
def get_request_data(self) -> TransitionToolRequest:
"""Convert the data to a TransitionToolRequest object."""
return TransitionToolRequest(
state=TransitionToolContext(
fork=self.fork_name,
chain_id=self.chain_id,
reward=self.reward,
),
input=self.to_input(),
)
    def _evaluate_filesystem(
        self,
        *,
        t8n_data: TransitionToolData,
        debug_output_path: str = "",
    ) -> TransitionToolOutput:
        """
        Execute a transition tool using the filesystem for its inputs and
        outputs.

        Writes `alloc`/`env`/`txs` (and optionally `blobParams`) JSON files
        into a temporary directory, runs the tool, then parses the output
        files back into a `TransitionToolOutput`.

        Raises:
            Exception: If the tool exits with a non-zero return code.
        """
        temp_dir = tempfile.TemporaryDirectory()
        os.mkdir(os.path.join(temp_dir.name, "input"))
        os.mkdir(os.path.join(temp_dir.name, "output"))

        input_contents = t8n_data.to_input().model_dump(mode="json", **model_dump_config)

        input_paths = {
            k: os.path.join(temp_dir.name, "input", f"{k}.json") for k in input_contents.keys()
        }
        for key, file_path in input_paths.items():
            write_json_file(input_contents[key], file_path)

        # Output paths are relative here; made absolute after the run below.
        output_paths = {
            output: os.path.join("output", f"{output}.json") for output in ["alloc", "result"]
        }
        output_paths["body"] = os.path.join("output", "txs.rlp")

        # Construct args for evmone-t8n binary
        args = [
            str(self.binary),
            "--state.fork",
            t8n_data.fork_name_if_supports_blob_params
            if self.supports_blob_params
            else t8n_data.fork_name,
            "--input.alloc",
            input_paths["alloc"],
            "--input.env",
            input_paths["env"],
            "--input.txs",
            input_paths["txs"],
            "--output.basedir",
            temp_dir.name,
            "--output.result",
            output_paths["result"],
            "--output.alloc",
            output_paths["alloc"],
            "--output.body",
            output_paths["body"],
            "--state.reward",
            str(t8n_data.reward),
            "--state.chainid",
            str(t8n_data.chain_id),
        ]

        if self.supports_opcode_count:
            args.extend(
                [
                    "--opcode.count",
                    "opcodes.json",
                ]
            )

        if self.supports_blob_params and input_paths.get("blobParams"):
            args.extend(
                [
                    "--input.blobParams",
                    input_paths["blobParams"],
                ]
            )

        if self.trace:
            args.append("--trace")

        result = subprocess.run(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        if debug_output_path:
            if os.path.exists(debug_output_path):
                shutil.rmtree(debug_output_path)
            shutil.copytree(temp_dir.name, debug_output_path)
            t8n_output_base_dir = os.path.join(debug_output_path, "t8n.sh.out")
            t8n_call = " ".join(args)
            for file_path in input_paths.values():  # update input paths
                t8n_call = t8n_call.replace(
                    os.path.dirname(file_path), os.path.join(debug_output_path, "input")
                )
            # use a new output path for basedir and outputs
            t8n_call = t8n_call.replace(
                temp_dir.name,
                t8n_output_base_dir,
            )
            t8n_script = textwrap.dedent(
                f"""\
                #!/bin/bash
                # hard-coded to avoid surprises
                rm -rf {debug_output_path}/t8n.sh.out
                mkdir -p {debug_output_path}/t8n.sh.out/output
                {t8n_call}
                """
            )
            dump_files_to_directory(
                debug_output_path,
                {
                    "args.py": args,
                    "returncode.txt": result.returncode,
                    "stdout.txt": result.stdout.decode(),
                    "stderr.txt": result.stderr.decode(),
                    "t8n.sh+x": t8n_script,
                },
            )

        if result.returncode != 0:
            raise Exception("failed to evaluate: " + result.stderr.decode())

        for key, file_path in output_paths.items():
            output_paths[key] = os.path.join(temp_dir.name, file_path)

        output_contents = {}
        for key, file_path in output_paths.items():
            if "txs.rlp" in file_path:
                # The body file is RLP, not JSON; it is not read back here.
                continue
            with open(file_path, "r+") as file:
                output_contents[key] = json.load(file)

        output = TransitionToolOutput.model_validate(
            output_contents, context={"exception_mapper": self.exception_mapper}
        )

        if self.supports_opcode_count:
            opcode_count_file_path = Path(temp_dir.name) / "opcodes.json"
            if opcode_count_file_path.exists():
                opcode_count = OpcodeCount.model_validate_json(opcode_count_file_path.read_text())
                output.result.opcode_count = opcode_count
                if debug_output_path:
                    dump_files_to_directory(
                        debug_output_path,
                        {
                            "opcodes.json": opcode_count.model_dump(),
                        },
                    )

        if self.trace:
            output.result.traces = self.collect_traces(
                output.result.receipts, temp_dir, debug_output_path
            )

        temp_dir.cleanup()

        return output
    def _restart_server(self) -> None:
        """Check if server is still responsive and restart if needed."""
        self.shutdown()
        # Brief pause to let the previous process release its resources.
        time.sleep(0.1)
        self.start_server()
    def _server_post(
        self,
        data: Dict[str, Any],
        timeout: int,
        url_args: Optional[Dict[str, List[str] | str]] = None,
        retries: int = 5,
    ) -> Response:
        """
        Send a POST request to the t8n-server and return the response.

        Retries with exponential back-off on connection errors and read
        timeouts, restarting the server before each retry.

        Raises:
            RequestsConnectionError | ReadTimeout: Re-raised once `retries`
                is exhausted.
            Exception: For non-200 responses.
        """
        if url_args is None:
            url_args = {}
        post_delay = 0.1
        while True:
            try:
                response = Session().post(
                    f"{self.server_url}?{urlencode(url_args, doseq=True)}",
                    json=data,
                    timeout=timeout,
                )
                break
            except (RequestsConnectionError, ReadTimeout) as e:
                # The server may have died or hung; restart it and retry.
                self._restart_server()
                retries -= 1
                if retries == 0:
                    raise e
                time.sleep(post_delay)
                post_delay *= 2
        response.raise_for_status()
        # Defensive check for non-200 success codes; `raise_for_status`
        # only covers 4xx/5xx.
        if response.status_code != 200:
            raise Exception(
                f"t8n-server returned status code {response.status_code}, "
                f"response: {response.text}"
            )
        return response
    def _generate_post_args(self, t8n_data: TransitionToolData) -> Dict[str, List[str] | str]:
        """
        Generate the arguments for the POST request to the t8n-server.

        Base implementation sends no extra URL arguments; subclasses may
        override.
        """
        del t8n_data
        return {}
    def _evaluate_server(
        self,
        *,
        t8n_data: TransitionToolData,
        debug_output_path: str = "",
        timeout: int,
    ) -> TransitionToolOutput:
        """
        Execute the transition tool sending inputs and outputs via a server.

        Args:
            t8n_data: Inputs for the state transition.
            debug_output_path: When set, request/response artifacts are
                dumped there.
            timeout: Request timeout in seconds.
        """
        request_data = t8n_data.get_request_data()
        request_data_json = request_data.model_dump(mode="json", **model_dump_config)

        temp_dir = tempfile.TemporaryDirectory()
        request_data_json["trace"] = self.trace
        if self.trace:
            # The server writes per-transaction trace files into this dir.
            request_data_json["output-basedir"] = temp_dir.name

        if debug_output_path:
            request_info = (
                f"Server URL: {self.server_url}\n\n"
                f"Request Data:\n{json.dumps(request_data_json, indent=2)}\n"
            )
            dump_files_to_directory(
                debug_output_path,
                {
                    "input/alloc.json": request_data.input.alloc,
                    "input/env.json": request_data.input.env,
                    "input/txs.json": [
                        tx.model_dump(mode="json", **model_dump_config)
                        for tx in request_data.input.txs
                    ],
                    "input/blob_params.json": request_data.input.blob_params,
                    "request_info.txt": request_info,
                },
            )

        response = self._server_post(
            data=request_data_json, url_args=self._generate_post_args(t8n_data), timeout=timeout
        )
        response_json = response.json()
        # pop optional test ``_info`` metadata from response, if present
        self._info_metadata = response_json.pop("_info_metadata", {})

        output: TransitionToolOutput = TransitionToolOutput.model_validate(
            response_json, context={"exception_mapper": self.exception_mapper}
        )
        if self.trace:
            output.result.traces = self.collect_traces(
                output.result.receipts, temp_dir, debug_output_path
            )
        temp_dir.cleanup()

        if debug_output_path:
            response_info = (
                f"Status Code: {response.status_code}\n\n"
                f"Headers:\n{json.dumps(dict(response.headers), indent=2)}\n\n"
                f"Content:\n{response.text}\n"
            )
            dump_files_to_directory(
                debug_output_path,
                {
                    "output/alloc.json": output.alloc,
                    "output/result.json": output.result,
                    "output/txs.rlp": str(output.body),
                    "response_info.txt": response_info,
                },
            )
        return output
    def _evaluate_stream(
        self,
        *,
        t8n_data: TransitionToolData,
        debug_output_path: str = "",
    ) -> TransitionToolOutput:
        """
        Execute a transition tool using stdin and stdout for its inputs and
        outputs.

        Raises:
            Exception: If the tool exits with a non-zero return code.
        """
        temp_dir = tempfile.TemporaryDirectory()
        args = self.construct_args_stream(t8n_data, temp_dir)

        stdin = t8n_data.to_input()

        result = subprocess.run(
            args,
            input=stdin.model_dump_json(**model_dump_config).encode(),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        # Dump artifacts before checking the return code so failures can
        # also be inspected.
        self.dump_debug_stream(debug_output_path, temp_dir, stdin, args, result)

        if result.returncode != 0:
            raise Exception("failed to evaluate: " + result.stderr.decode())

        output: TransitionToolOutput = TransitionToolOutput.model_validate_json(
            result.stdout, context={"exception_mapper": self.exception_mapper}
        )

        if debug_output_path:
            dump_files_to_directory(
                debug_output_path,
                {
                    "output/alloc.json": output.alloc,
                    "output/result.json": output.result,
                    "output/txs.rlp": str(output.body),
                },
            )

        if self.trace:
            output.result.traces = self.collect_traces(
                output.result.receipts, temp_dir, debug_output_path
            )
        temp_dir.cleanup()

        return output
    def safe_t8n_args(
        self,
        fork_name: str,
        chain_id: int,
        reward: int,
        temp_dir: tempfile.TemporaryDirectory | None = None,
    ) -> List[str]:
        """
        Safely construct t8n arguments with validated inputs.

        Validates `fork_name`, `chain_id` and `reward` before interpolating
        them into command-line flags so no unvalidated value reaches the
        subprocess invocation.

        Raises:
            ValueError: If any input fails validation.
        """
        # Validate fork name against actual transition tool names from all
        # available forks
        valid_forks = get_valid_transition_tool_names()
        if fork_name not in valid_forks:
            raise ValueError(f"Invalid fork name: {fork_name}")

        # Validate chain ID (should be positive integer)
        if not isinstance(chain_id, int) or chain_id <= 0:
            raise ValueError(f"Invalid chain ID: {chain_id}")

        # Validate reward (should be non-negative integer)
        # NOTE(review): `TransitionToolData.__post_init__` sets reward=-1 for
        # block 0, which this check rejects — confirm stream mode is never
        # used to evaluate the genesis block.
        if not isinstance(reward, int) or reward < 0:
            raise ValueError(f"Invalid reward: {reward}")

        # Use literal strings for command flags
        input_alloc: LiteralString = "--input.alloc=stdin"
        input_txs: LiteralString = "--input.txs=stdin"
        input_env: LiteralString = "--input.env=stdin"
        output_result: LiteralString = "--output.result=stdout"
        output_alloc: LiteralString = "--output.alloc=stdout"
        output_body: LiteralString = "--output.body=stdout"
        trace_flag: LiteralString = "--trace"

        args = [
            input_alloc,
            input_txs,
            input_env,
            output_result,
            output_alloc,
            output_body,
            f"--state.fork={fork_name}",
            f"--state.chainid={chain_id}",
            f"--state.reward={reward}",
        ]

        if self.trace and temp_dir:
            args.extend([trace_flag, f"--output.basedir={temp_dir.name}"])

        return args
def construct_args_stream(
self, t8n_data: TransitionToolData, temp_dir: tempfile.TemporaryDirectory
) -> List[str]:
"""Construct arguments for t8n interaction via streams."""
command: list[str] = [str(self.binary)]
if self.subcommand:
command.append(self.subcommand)
safe_args = self.safe_t8n_args(
t8n_data.fork_name, t8n_data.chain_id, t8n_data.reward, temp_dir
)
return command + safe_args
    def dump_debug_stream(
        self,
        debug_output_path: str,
        temp_dir: tempfile.TemporaryDirectory,
        stdin: TransitionToolInput,
        args: List[str],
        result: subprocess.CompletedProcess,
    ) -> None:
        """
        Export debug files if requested when interacting with t8n via streams.

        No-op when `debug_output_path` is empty. Writes the inputs, outputs
        and a reproduction shell script (`t8n.sh`).
        """
        if not debug_output_path:
            return

        t8n_call = " ".join(args)
        t8n_output_base_dir = os.path.join(debug_output_path, "t8n.sh.out")
        if self.trace:
            t8n_call = t8n_call.replace(temp_dir.name, t8n_output_base_dir)
        t8n_script = textwrap.dedent(
            f"""\
            #!/bin/bash
            # hard-coded to avoid surprises
            rm -rf {debug_output_path}/t8n.sh.out
            # unused if tracing is not enabled
            mkdir {debug_output_path}/t8n.sh.out
            {t8n_call} < {debug_output_path}/stdin.txt
            """
        )
        dump_files_to_directory(
            debug_output_path,
            {
                "args.py": args,
                "input/alloc.json": stdin.alloc,
                "input/env.json": stdin.env,
                "input/txs.json": [
                    tx.model_dump(mode="json", **model_dump_config) for tx in stdin.txs
                ],
                "returncode.txt": result.returncode,
                "stdin.txt": stdin,
                "stdout.txt": result.stdout.decode(),
                "stderr.txt": result.stderr.decode(),
                "t8n.sh+x": t8n_script,
            },
        )
    def evaluate(
        self,
        *,
        transition_tool_data: TransitionToolData,
        debug_output_path: str = "",
        slow_request: bool = False,
    ) -> TransitionToolOutput:
        """
        Execute the relevant evaluate method as required by the `t8n` tool.

        Dispatches to server, stream, or filesystem interaction based on the
        `t8n_use_server`/`t8n_use_stream` flags. If a client's `t8n` tool
        varies from the default behavior, this method can be overridden.
        """
        if self.t8n_use_server:
            if not self.server_url:
                # Lazily start the server on first use.
                self.start_server()
            return self._evaluate_server(
                t8n_data=transition_tool_data,
                debug_output_path=debug_output_path,
                timeout=SLOW_REQUEST_TIMEOUT if slow_request else NORMAL_SERVER_TIMEOUT,
            )

        if self.t8n_use_stream:
            return self._evaluate_stream(
                t8n_data=transition_tool_data, debug_output_path=debug_output_path
            )

        return self._evaluate_filesystem(
            t8n_data=transition_tool_data,
            debug_output_path=debug_output_path,
        )
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/__init__.py | src/ethereum_clis/__init__.py | """
Library of Python wrappers for the different implementations of transition
tools.
"""
from .cli_types import (
BlockExceptionWithMessage,
Result,
Traces,
TransactionExceptionWithMessage,
TransitionToolOutput,
)
from .clis.besu import BesuTransitionTool
from .clis.ethereumjs import EthereumJSTransitionTool
from .clis.evmone import (
EvmOneBlockchainFixtureConsumer,
EvmoneExceptionMapper,
EvmOneStateFixtureConsumer,
EvmOneTransitionTool,
)
from .clis.execution_specs import ExecutionSpecsTransitionTool
from .clis.geth import GethFixtureConsumer, GethTransitionTool
from .clis.nethermind import Nethtest, NethtestFixtureConsumer
from .clis.nimbus import NimbusTransitionTool
from .ethereum_cli import CLINotFoundInPathError, UnknownCLIError
from .fixture_consumer_tool import FixtureConsumerTool
from .transition_tool import TransitionTool
# Default implementations used when no specific tool is selected/detected.
TransitionTool.set_default_tool(ExecutionSpecsTransitionTool)
FixtureConsumerTool.set_default_tool(GethFixtureConsumer)

__all__ = (
    "BesuTransitionTool",
    "BlockExceptionWithMessage",
    "CLINotFoundInPathError",
    "EthereumJSTransitionTool",
    "EvmoneExceptionMapper",
    "EvmOneTransitionTool",
    "EvmOneStateFixtureConsumer",
    "EvmOneBlockchainFixtureConsumer",
    "ExecutionSpecsTransitionTool",
    "FixtureConsumerTool",
    "GethFixtureConsumer",
    "GethTransitionTool",
    "Nethtest",
    "NethtestFixtureConsumer",
    "NimbusTransitionTool",
    "Result",
    "Traces",
    "TransactionExceptionWithMessage",
    "TransitionTool",
    "TransitionToolOutput",
    "UnknownCLIError",
)
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/ethereum_cli.py | src/ethereum_clis/ethereum_cli.py | """Abstract base class to help create Python interfaces to Ethereum CLIs."""
import os
import shutil
import subprocess
from itertools import groupby
from pathlib import Path
from re import Pattern
from typing import Any, List, Optional, Type
from pytest_plugins.custom_logging import get_logger
logger = get_logger(__name__)
class UnknownCLIError(Exception):
    """Raised when a CLI binary cannot be matched to any registered tool."""
class CLINotFoundInPathError(Exception):
"""Exception raised if the specified CLI binary isn't found in the path."""
def __init__(
self,
message: str = "The CLI binary was not found in the path",
binary: Path | None = None,
) -> None:
"""Initialize the exception."""
if binary:
message = f"{message} ({binary})"
super().__init__(message)
class EthereumCLI:
    """
    Abstract base class to help create Python interfaces to Ethereum CLIs.

    This base class helps handle the special case of EVM subcommands, such as
    the EVM transition tool `t8n`, which have multiple implementations, one
    from each client team. In the case of these tools, this class mainly serves
    to help instantiate the correct subclass based on the output of the CLI's
    version flag.
    """

    # Subclasses considered by `from_binary_path` auto-detection.
    registered_tools: List[Type[Any]] = []
    default_tool: Optional[Type[Any]] = None
    binary: Path
    default_binary: Path
    # Regex a subclass uses to recognize its own version output.
    detect_binary_pattern: Pattern
    version_flag: str = "-v"
    # Lazily-populated cache for `version()`.
    cached_version: Optional[str] = None
def __init__(self, *, binary: Optional[Path] = None):
"""Abstract init method that all subclasses must implement."""
if binary is None:
binary = self.default_binary
else:
# improve behavior of which by resolving the path:
# ~/relative paths don't work
resolved_path = Path(os.path.expanduser(binary)).resolve()
if resolved_path.exists():
binary = resolved_path
binary = shutil.which(binary) # type: ignore
if not binary:
raise CLINotFoundInPathError(binary=binary)
self.binary = Path(binary)
    @classmethod
    def register_tool(cls, tool_subclass: Type[Any]) -> None:
        """Register a given subclass as a tool option for auto-detection."""
        cls.registered_tools.append(tool_subclass)
    @classmethod
    def set_default_tool(cls, tool_subclass: Type[Any]) -> None:
        """
        Register the default tool subclass, used by `from_binary_path`
        when no binary path is provided.
        """
        cls.default_tool = tool_subclass
    @classmethod
    def from_binary_path(cls, *, binary_path: Optional[Path] = None, **kwargs: Any) -> Any:
        """
        Instantiate the appropriate CLI subclass derived from the
        CLI's `binary_path`.

        This method will attempt to detect the CLI version and instantiate
        the appropriate subclass based on the version output by running
        the CLI with the version flag.

        Raises:
            CLINotFoundInPathError: If the binary cannot be located.
            UnknownCLIError: If no registered subclass matches the binary's
                version output.
        """
        assert cls.default_tool is not None, "default CLI implementation was never set"

        # ensure provided t8n binary can be found and used
        if binary_path is None:
            logger.debug("Binary path of provided t8n is None!")
            return cls.default_tool(binary=binary_path, **kwargs)

        expanded_path = Path(os.path.expanduser(binary_path))
        logger.debug(f"Expanded path of provided t8n: {expanded_path}")
        resolved_path = expanded_path.resolve()
        logger.debug(f"Resolved path of provided t8n: {resolved_path}")

        if resolved_path.exists():
            logger.debug("Resolved path exists")
            binary = Path(resolved_path)
        else:
            logger.debug(
                f"Resolved path does not exist: {resolved_path}\nTrying to find it via `which`"
            )
            # it might be that the provided binary exists in path
            filename = os.path.basename(resolved_path)
            binary = shutil.which(filename)  # type: ignore
            logger.debug(f"Output of 'which {binary_path}': {binary}")
            if binary is None:
                logger.error(f"Resolved t8n binary path does not exist: {resolved_path}")
                raise CLINotFoundInPathError(binary=resolved_path)

        assert binary is not None
        logger.debug(f"Successfully located the path of the t8n binary: {binary}")
        binary = Path(binary)

        # Group the tools by version flag, so we only have to call the tool
        # once for all the classes that share the same version flag
        for version_flag, subclasses in groupby(
            cls.registered_tools, key=lambda x: x.version_flag
        ):
            logger.debug(
                f"\n{'-' * 120}\nTrying this `version` flag to determine "
                f"if t8n supported: {version_flag}"
            )
            # adding more logging reveals we check for `-v` twice..
            try:
                result = subprocess.run(
                    [binary, version_flag],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                )
                logger.debug(
                    f"Subprocess:\n\tstdout: {result.stdout!r}\n\n\n\t"
                    f"stderr: {result.stderr!r}\n\n\n"
                )
                if result.returncode != 0:
                    logger.debug(f"Subprocess returncode is not 0!It is: {result.returncode}")
                    # don't raise exception, you are supposed to keep trying
                    # different version flags
                    continue

                # if there is a breaking error try sth else
                if result.stderr:
                    stderr_str = str(result.stderr)
                    if EthereumCLI.stderr_is_breaking(stderr=stderr_str):
                        logger.debug(f"Stderr detected: {stderr_str}")
                        continue

                binary_output = ""
                if result.stdout:
                    binary_output = result.stdout.decode().strip()
                    logger.debug(f"Stripped subprocess stdout: {binary_output}")

                # First subclass whose pattern matches the version output
                # wins; construction failures are printed and skipped.
                for subclass in subclasses:
                    logger.debug(f"Trying subclass {subclass}")
                    try:
                        if subclass.detect_binary(binary_output):
                            subclass_check_result = subclass(binary=binary, **kwargs)
                            return subclass_check_result
                    except Exception as e:
                        print(e)
                        continue
                    logger.debug(
                        f"T8n with version {binary_output} does not belong to subclass {subclass}"
                    )
            except Exception as e:
                logger.debug(
                    f"Trying to determine t8n version with flag `{version_flag}` failed: {e}"
                )
                continue

        raise UnknownCLIError(f"Unknown CLI: {binary}")
@classmethod
def detect_binary(cls, binary_output: str) -> bool:
"""
Return True if a CLI's `binary_output` matches the
class's expected output.
"""
logger.debug(f"Trying to detect binary for {binary_output}..")
assert cls.detect_binary_pattern is not None
logger.debug(
f"Trying to match {binary_output} against this pattern: {cls.detect_binary_pattern}"
)
match_result = cls.detect_binary_pattern.match(binary_output)
match_successful: bool = match_result is not None
return match_successful
@classmethod
def is_installed(cls, binary_path: Optional[Path] = None) -> bool:
"""Return whether the tool is installed in the current system."""
if binary_path is None:
binary_path = cls.default_binary
else:
resolved_path = Path(os.path.expanduser(binary_path)).resolve()
if resolved_path.exists():
binary_path = resolved_path
binary = shutil.which(binary_path)
return binary is not None
@classmethod
def stderr_is_breaking(cls, *, stderr: str) -> bool:
    """
    Decide whether `stderr` output is a breaking error for this tool;
    known-harmless warnings are filtered out here.
    """
    # "SVE vector length" is a harmless java warning on certain systems
    # (besu) and must not be treated as a failure.
    return "SVE vector length" not in stderr
def version(self) -> str:
    """
    Return the name and version of the CLI as reported by
    the CLI's version flag.

    The result is cached on the instance so the subprocess is only
    spawned once per object.

    Raises:
        Exception: If the version command exits with a non-zero status
            code; the message includes the command's stderr output.
    """
    if self.cached_version is None:
        result = subprocess.run(
            [str(self.binary), self.version_flag],
            stdout=subprocess.PIPE,
            # Capture stderr so it can be reported on failure; previously it
            # was not captured, so `result.stderr` was None and the error
            # branch below raised AttributeError instead of the intended
            # message.
            stderr=subprocess.PIPE,
        )
        if result.returncode != 0:
            stderr_text = result.stderr.decode() if result.stderr else ""
            raise Exception("failed to evaluate: " + stderr_text)
        self.cached_version = result.stdout.decode().strip()
    return self.cached_version
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/fixture_consumer_tool.py | src/ethereum_clis/fixture_consumer_tool.py | """Fixture consumer tool abstract class."""
from typing import List, Type
from ethereum_test_fixtures import FixtureConsumer, FixtureFormat
from .ethereum_cli import EthereumCLI
class FixtureConsumerTool(FixtureConsumer, EthereumCLI):
    """
    Fixture consumer tool abstract base class which should be inherited by all
    fixture consumer tool implementations.
    """

    # Every concrete subclass that gets imported is recorded here via
    # __init_subclass__ below.
    registered_tools: List[Type["FixtureConsumerTool"]] = []
    default_tool: Type["FixtureConsumerTool"] | None = None

    def __init_subclass__(cls, *, fixture_formats: List[FixtureFormat]):
        """Register all subclasses of FixtureConsumerTool as possible tools."""
        # NOTE(review): register_tool is presumably inherited from
        # EthereumCLI — confirm it records the subclass on this base class.
        FixtureConsumerTool.register_tool(cls)
        # Each subclass declares, via the class-definition keyword argument,
        # which fixture formats it is able to consume.
        cls.fixture_formats = fixture_formats
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/tests/test_execution_specs.py | src/ethereum_clis/tests/test_execution_specs.py | """Additional tests for the EELS t8n tool."""
import json
import os
import sysconfig
from pathlib import Path
from shutil import which
from typing import Dict, List, Type
import pytest
from pydantic import TypeAdapter
from ethereum_clis import ExecutionSpecsTransitionTool, TransitionTool
from ethereum_test_base_types import to_json
from ethereum_test_forks import Berlin
from ethereum_test_types import Alloc, Environment, Transaction
# Location of the reference fixture files (alloc/env/txs/exp JSON) consumed
# by the fixtures and tests below.
FIXTURES_ROOT = Path(os.path.join("src", "ethereum_clis", "tests", "fixtures"))
# Entry-point name of the EELS t8n resolver binary looked up on the PATH.
DEFAULT_EVM_T8N_BINARY_NAME = "ethereum-spec-evm-resolver"
@pytest.fixture(autouse=True)
def monkeypatch_path_for_entry_points(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """
    Prepend the interpreter's "scripts" directory to PATH.

    Installed entry points live there — typically `./.venv/bin` when pytest
    runs under uv — and fill must be able to locate the
    ethereum-spec-evm-resolver "binary" (entrypoint) when executed via
    pytester.
    """
    scripts_dir = sysconfig.get_path("scripts")
    monkeypatch.setenv("PATH", f"{scripts_dir}:{os.environ['PATH']}")
@pytest.mark.parametrize(
    "alloc,expected_hash",
    [
        # Account with all fields spelled out explicitly.
        (
            {
                "0x1000000000000000000000000000000000000000": {
                    "balance": "0x0BA1A9CE0BA1A9CE",
                    "code": "0x",
                    "nonce": "0",
                    "storage": {},
                },
            },
            bytes.fromhex("51e7c7508e76dca0"),
        ),
        # Same account with default fields omitted — must produce the same
        # state root as the explicit version above.
        (
            {
                "0x1000000000000000000000000000000000000000": {
                    "balance": "0x0BA1A9CE0BA1A9CE",
                },
            },
            bytes.fromhex("51e7c7508e76dca0"),
        ),
        # A non-zero nonce changes the state root.
        (
            {
                "0x1000000000000000000000000000000000000000": {
                    "balance": "0x0BA1A9CE0BA1A9CE",
                    "code": "0x",
                    "nonce": "1",
                    "storage": {},
                },
            },
            bytes.fromhex("37c2dedbdea6b3af"),
        ),
        # Zero balance but a non-empty storage slot.
        (
            {
                "0x1000000000000000000000000000000000000000": {
                    "balance": "0",
                    "storage": {
                        "0x01": "0x01",
                    },
                },
            },
            bytes.fromhex("096122e88929baec"),
        ),
    ],
)
def test_calc_state_root(
    alloc: Dict,
    expected_hash: bytes,
) -> None:
    """Test calculation of the state root against expected hash."""
    # Only the leading 8 bytes of the 32-byte root are pinned by the fixtures.
    assert Alloc(alloc).state_root().startswith(expected_hash)
@pytest.mark.parametrize("evm_tool", [ExecutionSpecsTransitionTool])
@pytest.mark.parametrize("binary_arg", ["no_binary_arg", "path_type", "str_type"])
def test_evm_tool_binary_arg(
    evm_tool: Type[ExecutionSpecsTransitionTool], binary_arg: str
) -> None:
    """Test the `evm_tool` binary argument."""
    if binary_arg == "no_binary_arg":
        # Default lookup: no binary argument at all.
        evm_tool().version()
        return
    if binary_arg == "path_type":
        located = which(DEFAULT_EVM_T8N_BINARY_NAME)
        if not located:
            # typing: Path can not take None; but if None, we may
            # as well fail explicitly.
            raise Exception(
                f"Failed to find `{DEFAULT_EVM_T8N_BINARY_NAME}` in the PATH via which"
            )
        evm_tool(binary=Path(located)).version()
        return
    if binary_arg == "str_type":
        located = which(DEFAULT_EVM_T8N_BINARY_NAME)
        if located:
            evm_tool(binary=Path(located)).version()
            return
    # Unknown parameter value, or str_type lookup failed.
    raise Exception("unknown test parameter")
# Pydantic adapter used to deserialize a JSON array into a list of
# Transaction objects (used by the `txs` fixture below).
transaction_type_adapter = TypeAdapter(List[Transaction])
@pytest.fixture
def alloc(test_dir: str) -> Alloc:
    """Load and validate the pre-state from the test's `alloc.json` file."""
    alloc_file = Path(FIXTURES_ROOT, test_dir, "alloc.json")
    return Alloc.model_validate_json(alloc_file.read_text())
@pytest.fixture
def txs(test_dir: str) -> List[Transaction]:
    """Load the list of transactions from the test's `txs.json` file."""
    txs_file = Path(FIXTURES_ROOT, test_dir, "txs.json")
    return transaction_type_adapter.validate_json(txs_file.read_text())
@pytest.fixture
def env(test_dir: str) -> Environment:
    """Load the block environment from the test's `env.json` file."""
    env_file = Path(FIXTURES_ROOT, test_dir, "env.json")
    return Environment.model_validate_json(env_file.read_text())
@pytest.mark.parametrize("test_dir", os.listdir(path=FIXTURES_ROOT))
def test_evm_t8n(
    default_t8n: TransitionTool,
    alloc: Alloc,
    txs: List[Transaction],
    env: Environment,
    test_dir: str,
) -> None:
    """
    Test the `evaluate` method of the `ExecutionSpecsTransitionTool` class.

    Each fixture directory supplies alloc/txs/env inputs plus the expected
    t8n output in `exp.json`.
    """
    expected_path = Path(FIXTURES_ROOT, test_dir, "exp.json")
    with open(expected_path, "r") as exp:
        expected = json.load(exp)
    t8n_output = default_t8n.evaluate(
        transition_tool_data=TransitionTool.TransitionToolData(
            alloc=alloc,
            txs=txs,
            env=env,
            fork=Berlin,
            chain_id=1,
            reward=0,
            blob_schedule=Berlin.blob_schedule(),
        ),
    )
    assert to_json(t8n_output.alloc) == expected.get("alloc")
    if isinstance(default_t8n, ExecutionSpecsTransitionTool):
        # The expected output was generated with geth, instead of deleting
        # any info from this expected output, the fields not returned by
        # eels are handled here.
        missing_receipt_fields = [
            "root",
            "status",
            "cumulativeGasUsed",
            "contractAddress",
            "blockHash",
            "transactionIndex",
        ]
        # Strip the geth-only receipt fields from every expected receipt.
        for key in missing_receipt_fields:
            for i, _ in enumerate(expected.get("result")["receipts"]):
                del expected.get("result")["receipts"][i][key]
        # EELS omits an all-zero logs bloom entirely; drop it from the
        # expectation too.
        for i, receipt in enumerate(expected.get("result")["receipts"]):
            if int(receipt["logsBloom"], 16) == 0:
                del expected.get("result")["receipts"][i]["logsBloom"]
    t8n_result = to_json(t8n_output.result)
    # Rejection error strings differ between implementations; compare the
    # rejected entries without their `error` text.
    for i, _ in enumerate(expected.get("result")["rejected"]):
        del expected.get("result")["rejected"][i]["error"]
        del t8n_result["rejected"][i]["error"]
    assert t8n_result == expected.get("result")
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/tests/test_transition_tools_support.py | src/ethereum_clis/tests/test_transition_tools_support.py | """Check T8N filling support."""
from typing import Dict
import pytest
from ethereum_clis import ExecutionSpecsTransitionTool, TransitionTool
from ethereum_test_base_types import Account, Address, TestAddress, TestPrivateKey
from ethereum_test_forks import (
ArrowGlacier,
Berlin,
Byzantium,
Cancun,
Constantinople,
Fork,
GrayGlacier,
London,
MuirGlacier,
Paris,
Prague,
get_deployed_forks,
)
from ethereum_test_specs.blockchain import BlockchainFixture, BlockchainTest
from ethereum_test_tools import (
AccessList,
AuthorizationTuple,
Block,
Environment,
Storage,
Transaction,
Withdrawal,
add_kzg_version,
)
from ethereum_test_types import Alloc
from ethereum_test_vm import Opcodes as Op
# Version byte prepended to blob hashes via `add_kzg_version` below.
BLOB_COMMITMENT_VERSION_KZG = 1

# Forks exercised by the stress test: every deployed fork plus Prague.
fork_set = set(get_deployed_forks())
fork_set.add(Prague)
def test_ci_multi_t8n_support(
    installed_transition_tool_instances: Dict[str, TransitionTool | Exception],
    running_in_ci: bool,
) -> None:
    """
    Check that the instances of t8n we expect in CI environment were found.
    """
    found = set(installed_transition_tool_instances.keys())
    required = {"ExecutionSpecsTransitionTool"}
    if running_in_ci:
        # CI environments additionally provide geth and evmone binaries.
        required |= {"GethTransitionTool", "EvmOneTransitionTool"}
    assert required.issubset(found), (
        f"Missing expected transition tools: {required - found}"
    )
@pytest.mark.parametrize(
    "fork",
    sorted(fork_set, key=lambda f: f.__name__),  # type: ignore
)
def test_t8n_support(fork: Fork, installed_t8n: TransitionTool) -> None:
    """Stress test that sends all possible t8n interactions."""
    # NOTE(review): the glacier forks are skipped here — presumably not
    # meaningfully distinct for t8n purposes; confirm.
    if fork in [MuirGlacier, ArrowGlacier, GrayGlacier]:
        return
    # NOTE(review): EELS is skipped for Constantinople — confirm the reason.
    if isinstance(installed_t8n, ExecutionSpecsTransitionTool) and fork in [Constantinople]:
        return
    env = Environment()
    sender = TestAddress
    storage_1 = Storage()
    storage_2 = Storage()
    code_account_1 = Address(0x1001)
    code_account_2 = Address(0x1002)
    # Two code accounts: each stores BLOCKHASH / PREVRANDAO observations so
    # the post-state can verify the env values the t8n tool received.
    pre = Alloc(
        {
            TestAddress: Account(balance=10_000_000),
            code_account_1: Account(
                code=Op.SSTORE(
                    storage_1.store_next(1, "blockhash_0_is_set"), Op.GT(Op.BLOCKHASH(0), 0)
                )
                + Op.SSTORE(storage_1.store_next(0, "blockhash_1"), Op.BLOCKHASH(1))
                + Op.SSTORE(
                    storage_1.store_next(1 if fork < Paris else 0, "difficulty_1_is_near_20000"),
                    Op.AND(Op.GT(Op.PREVRANDAO(), 0x19990), Op.LT(Op.PREVRANDAO(), 0x20100)),
                )
            ),
            code_account_2: Account(
                code=Op.SSTORE(
                    storage_2.store_next(1, "blockhash_1_is_set"), Op.GT(Op.BLOCKHASH(1), 0)
                )
                + Op.SSTORE(
                    storage_2.store_next(1 if fork < Paris else 0, "difficulty_2_is_near_20000"),
                    Op.AND(Op.GT(Op.PREVRANDAO(), 0x19990), Op.LT(Op.PREVRANDAO(), 0x20100)),
                )
            ),
        }
    )
    tx_1 = Transaction(
        gas_limit=100_000,
        to=code_account_1,
        data=b"",
        nonce=0,
        secret_key=TestPrivateKey,
        protected=fork >= Byzantium,
    )
    # tx_2 exercises the newest transaction type available at `fork`.
    if fork < Berlin:
        # Feed legacy transaction, type 0
        tx_2 = Transaction(
            gas_limit=100_000,
            to=code_account_2,
            data=b"",
            nonce=1,
            secret_key=TestPrivateKey,
            protected=fork >= Byzantium,
        )
    elif fork < London:
        # Feed access list transaction, type 1
        tx_2 = Transaction(
            gas_limit=100_000,
            to=code_account_2,
            data=b"",
            nonce=1,
            secret_key=TestPrivateKey,
            protected=fork >= Byzantium,
            access_list=[
                AccessList(
                    address=0x1234,
                    storage_keys=[0, 1],
                )
            ],
        )
    elif fork < Cancun:
        # Feed base fee transaction, type 2
        tx_2 = Transaction(
            to=code_account_2,
            data=b"",
            nonce=1,
            secret_key=TestPrivateKey,
            protected=fork >= Byzantium,
            gas_limit=100_000,
            max_priority_fee_per_gas=5,
            max_fee_per_gas=10,
            access_list=[
                AccessList(
                    address=0x1234,
                    storage_keys=[0, 1],
                )
            ],
        )
    elif fork < Prague:
        # Feed blob transaction, type 3
        tx_2 = Transaction(
            to=code_account_2,
            data=b"",
            nonce=1,
            secret_key=TestPrivateKey,
            protected=fork >= Byzantium,
            gas_limit=100_000,
            max_priority_fee_per_gas=5,
            max_fee_per_gas=10,
            max_fee_per_blob_gas=30,
            blob_versioned_hashes=add_kzg_version([1], BLOB_COMMITMENT_VERSION_KZG),
            access_list=[
                AccessList(
                    address=0x1234,
                    storage_keys=[0, 1],
                )
            ],
        )
    else:
        # Feed set code transaction, type 4
        tx_2 = Transaction(
            to=sender,
            data=b"",
            sender=sender,
            secret_key=TestPrivateKey,
            protected=fork >= Byzantium,
            gas_limit=100_000,
            max_priority_fee_per_gas=5,
            max_fee_per_gas=10,
            nonce=1,
            access_list=[
                AccessList(
                    address=0x1234,
                    storage_keys=[0, 1],
                )
            ],
            authorization_list=[
                AuthorizationTuple(
                    address=code_account_2, nonce=2, signer=sender, secret_key=TestPrivateKey
                ),
            ],
        )
    block_1 = Block(
        txs=[tx_1],
        expected_post_state={
            code_account_1: Account(
                storage=storage_1,
            ),
        },
    )
    # For Prague+ the set-code transaction (type 4) executes code_account_2's
    # code in the sender's context, so the storage assertions move to sender.
    block_2 = Block(
        txs=[tx_2],
        expected_post_state={
            code_account_2: Account(
                balance=1_000_000_000 if fork >= Cancun else 0,
                storage=storage_2,
            ),
        }
        if fork < Prague
        else {
            code_account_2: Account(
                balance=1_000_000_000 if fork >= Cancun else 0,
            ),
            sender: Account(
                storage=storage_2,
            ),
        },
    )
    # The withdrawal below funds code_account_2, matching the balance
    # expectation above for Cancun and later forks.
    if fork >= Cancun:
        block_2.withdrawals = [
            Withdrawal(
                address=code_account_2,
                amount=1,
                index=1,
                validator_index=0,
            ),
        ]
    test = BlockchainTest(
        genesis_environment=env,
        pre=pre,
        post=block_1.expected_post_state,
        blocks=[block_1, block_2],
    )
    test.generate(
        t8n=installed_t8n,
        fork=fork,
        fixture_format=BlockchainFixture,
    )
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/tests/test_transition_tool.py | src/ethereum_clis/tests/test_transition_tool.py | """Test the transition tool and subclasses."""
import shutil
import subprocess
from pathlib import Path
from typing import Type
import pytest
from ethereum_clis import (
CLINotFoundInPathError,
EvmOneTransitionTool,
ExecutionSpecsTransitionTool,
GethTransitionTool,
NimbusTransitionTool,
TransitionTool,
)
def test_default_tool() -> None:
    """Tests that the default t8n tool is set."""
    # NOTE(review): ExecutionSpecsTransitionTool is presumably the fallback
    # used when no explicit t8n binary is configured — confirm.
    assert TransitionTool.default_tool is ExecutionSpecsTransitionTool
@pytest.mark.parametrize(
    "binary_path,which_result,read_result,expected_class",
    [
        (
            Path("evm"),
            "evm",
            "evm version 1.12.1-unstable-c7b099b2-20230627",
            GethTransitionTool,
        ),
        (
            Path("evmone-t8n"),
            "evmone-t8n",
            "evmone-t8n 0.11.0-dev+commit.93997506",
            EvmOneTransitionTool,
        ),
        (
            None,
            "evm",
            "evm version 1.12.1-unstable-c7b099b2-20230627",
            ExecutionSpecsTransitionTool,
        ),
        (
            Path("t8n"),
            "t8n",
            "Nimbus-t8n 0.1.2\n\x1b[0m",
            NimbusTransitionTool,
        ),
    ],
)
def test_from_binary(
    monkeypatch: pytest.MonkeyPatch,
    binary_path: Path | None,
    which_result: str,
    read_result: str,
    expected_class: Type[TransitionTool],
) -> None:
    """Test that `from_binary` instantiates the correct subclass."""

    class FakeCompletedProcess:
        """Stand-in for subprocess.CompletedProcess reporting success."""

        def __init__(self, stdout: bytes) -> None:
            self.stdout = stdout
            self.stderr = None
            self.returncode = 0

    def fake_which(cmd: str) -> str:
        del cmd  # the lookup result is fixed by the test parameters
        return which_result

    def fake_run(args: list, **kwargs: dict) -> FakeCompletedProcess:
        del args, kwargs
        return FakeCompletedProcess(read_result.encode())

    # Intercept both the PATH lookup and the version subprocess call.
    monkeypatch.setattr(shutil, "which", fake_which)
    monkeypatch.setattr(subprocess, "run", fake_run)
    assert isinstance(TransitionTool.from_binary_path(binary_path=binary_path), expected_class)
def test_unknown_binary_path() -> None:
    """
    Test that `from_binary_path` raises `CLINotFoundInPathError` for unknown
    binary paths.
    """
    with pytest.raises(CLINotFoundInPathError):
        TransitionTool.from_binary_path(binary_path=Path("unknown_binary_path"))
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/clis/nimbus.py | src/ethereum_clis/clis/nimbus.py | """Nimbus Transition tool interface."""
import re
import subprocess
from pathlib import Path
from typing import ClassVar, Dict, Optional
from ethereum_test_exceptions import (
BlockException,
ExceptionBase,
ExceptionMapper,
TransactionException,
)
from ethereum_test_forks import Fork
from ..transition_tool import TransitionTool
class NimbusTransitionTool(TransitionTool):
    """Nimbus `evm` Transition tool interface wrapper class."""

    default_binary = Path("t8n")
    detect_binary_pattern = re.compile(r"^Nimbus-t8n\b")
    version_flag: str = "--version"

    binary: Path
    cached_version: Optional[str] = None
    trace: bool

    def __init__(
        self,
        *,
        binary: Optional[Path] = None,
        trace: bool = False,
    ):
        """
        Initialize the Nimbus Transition tool interface.

        Runs `<binary> --help` once and caches its output in
        `self.help_string` (used by `is_fork_supported`).

        Raises:
            Exception: If the help command cannot be executed or exits with
                a non-zero status code.
        """
        super().__init__(exception_mapper=NimbusExceptionMapper(), binary=binary, trace=trace)
        args = [str(self.binary), "--help"]
        try:
            # `check=True` makes subprocess.run raise CalledProcessError on a
            # non-zero exit; without it the except clause below was dead code
            # and a failed `--help` call would silently populate
            # `help_string` from the failed run's output.
            result = subprocess.run(args, capture_output=True, text=True, check=True)
        except subprocess.CalledProcessError as e:
            raise Exception(
                f"evm process unexpectedly returned a non-zero status code: {e}."
            ) from e
        except Exception as e:
            raise Exception(f"Unexpected exception calling evm tool: {e}.") from e
        self.help_string = result.stdout

    def version(self) -> str:
        """
        Get `evm` binary version, stripping the ANSI reset sequence that
        Nimbus appends to its version output.
        """
        if self.cached_version is None:
            self.cached_version = re.sub(r"\x1b\[0m", "", super().version()).strip()
        return self.cached_version

    def is_fork_supported(self, fork: Fork) -> bool:
        """
        Return True if the fork is supported by the tool.

        If the fork is a transition fork, we want to check the fork it
        transitions to.
        """
        return fork.transition_tool_name() in self.help_string
class NimbusExceptionMapper(ExceptionMapper):
    """
    Translate between EEST exceptions and error strings returned by Nimbus.
    """

    # Exact error substrings emitted by Nimbus for each EEST exception.
    mapping_substring: ClassVar[Dict[ExceptionBase, str]] = {
        TransactionException.TYPE_4_TX_CONTRACT_CREATION: (
            "set code transaction must not be a create transaction"
        ),
        TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: "invalid tx: not enough cash to send",
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            "would exceed maximum allowance"
        ),
        TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: (
            "max fee per blob gas less than block blob gas fee"
        ),
        TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: (
            "max fee per gas less than block base fee"
        ),
        TransactionException.TYPE_3_TX_PRE_FORK: (
            "blob tx used but field env.ExcessBlobGas missing"
        ),
        TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: (
            "invalid tx: one of blobVersionedHash has invalid version"
        ),
        # TODO: temp solution until mapper for nimbus is fixed
        TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: "zero gasUsed but transactions present",
        # This message is the same as TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED
        TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "exceeds maximum allowance",
        TransactionException.TYPE_3_TX_ZERO_BLOBS: "blob transaction missing blob hashes",
        TransactionException.INTRINSIC_GAS_TOO_LOW: "zero gasUsed but transactions present",
        TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "intrinsic gas too low",
        TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded",
        BlockException.RLP_BLOCK_LIMIT_EXCEEDED: (
            # TODO:
            "ExceededBlockSizeLimit: Exceeded block size limit"
        ),
    }
    # Nimbus currently requires no regex-based error matching.
    mapping_regex: ClassVar[Dict[ExceptionBase, str]] = {}
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/clis/nethermind.py | src/ethereum_clis/clis/nethermind.py | """Interfaces for Nethermind CLIs."""
import json
import re
import shlex
import subprocess
import textwrap
from functools import cache
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
import pytest
from ethereum_test_exceptions import BlockException, ExceptionMapper, TransactionException
from ethereum_test_fixtures import BlockchainFixture, EOFFixture, FixtureFormat, StateFixture
from ..ethereum_cli import EthereumCLI
from ..file_utils import dump_files_to_directory
from ..fixture_consumer_tool import FixtureConsumerTool
class Nethtest(EthereumCLI):
    """Nethermind `nethtest` binary base class."""

    default_binary = Path("nethtest")
    # new pattern allows e.g. '1.2.3', in the past that was denied
    detect_binary_pattern = re.compile(r"^\d+\.\d+\.\d+(-[a-zA-Z0-9]+)?(\+[a-f0-9]{40})?$")
    version_flag: str = "--version"
    cached_version: Optional[str] = None

    def __init__(
        self,
        binary: Path,
        trace: bool = False,
        exception_mapper: ExceptionMapper | None = None,
    ):
        """Initialize the Nethtest class."""
        self.binary = binary
        self.trace = trace
        # TODO: Implement NethermindExceptionMapper
        # NOTE(review): a NethermindExceptionMapper class exists further down
        # this module but is not wired up as the default here — confirm
        # whether that is intentional.
        self.exception_mapper = exception_mapper if exception_mapper else None

    def _run_command(self, command: List[str]) -> subprocess.CompletedProcess:
        """Run `command` capturing stdout/stderr as text; wrap any failure."""
        try:
            return subprocess.run(
                command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
        except subprocess.CalledProcessError as e:
            raise Exception("Command failed with non-zero status.") from e
        except Exception as e:
            raise Exception("Unexpected exception calling evm tool.") from e

    def _consume_debug_dump(
        self,
        command: Tuple[str, ...],
        result: subprocess.CompletedProcess,
        debug_output_path: Path,
    ) -> None:
        """Dump the command, its outputs, and a replay script for debugging."""
        # our assumption is that each command element is a string
        assert all(isinstance(x, str) for x in command), (
            f"Not all elements of 'command' list are strings: {command}"
        )
        # ensure that flags with spaces are wrapped in double-quotes
        consume_direct_call = " ".join(shlex.quote(arg) for arg in command)
        consume_direct_script = textwrap.dedent(
            f"""\
            #!/bin/bash
            {consume_direct_call}
            """
        )
        dump_files_to_directory(
            str(debug_output_path),
            {
                "consume_direct_args.py": command,
                "consume_direct_returncode.txt": result.returncode,
                "consume_direct_stdout.txt": result.stdout,
                "consume_direct_stderr.txt": result.stderr,
                "consume_direct.sh+x": consume_direct_script,
            },
        )

    # NOTE(review): `@cache` on an instance method keys on `self` and keeps
    # the instance alive for the process lifetime (ruff B019) — acceptable if
    # only a handful of Nethtest instances are created; confirm.
    @cache  # noqa
    def help(self, subcommand: str | None = None) -> str:
        """Return the help string, optionally for a subcommand."""
        help_command = [str(self.binary)]
        if subcommand:
            help_command.append(subcommand)
        help_command.append("--help")
        return self._run_command(help_command).stdout

    @cache  # noqa
    def has_eof_support(self) -> bool:
        """
        Return True if the `nethtest` binary supports the `--eofTest` flag.

        Currently, nethtest EOF support is only available in nethermind's
        feature/evm/eof branch
        https://github.com/NethermindEth/nethermind/tree/feature/evm/eof
        """
        return "--eofTest" in self.help()
class NethtestFixtureConsumer(
    Nethtest,
    FixtureConsumerTool,
    fixture_formats=[StateFixture, BlockchainFixture, EOFFixture],
):
    """Nethermind implementation of the fixture consumer."""

    def _build_command_with_options(
        self,
        fixture_format: FixtureFormat,
        fixture_path: Path,
        fixture_name: Optional[str] = None,
        debug_output_path: Optional[Path] = None,
    ) -> Tuple[str, ...]:
        """Assemble the nethtest command line for the given fixture format."""
        assert fixture_name, "Fixture name must be provided for nethtest."
        command = [str(self.binary)]
        if fixture_format is BlockchainFixture:
            command += ["--blockTest", "--filter", f"{re.escape(fixture_name)}"]
        elif fixture_format is StateFixture:
            # TODO: consider using `--filter` here to readily access traces
            # from the output
            pass  # no additional options needed
        elif fixture_format is EOFFixture:
            command += ["--eofTest"]
        else:
            raise Exception(
                f"Fixture format {fixture_format.format_name} not supported by {self.binary}"
            )
        command += ["--input", str(fixture_path)]
        if debug_output_path:
            command += ["--trace"]
        # Returned as a tuple so it is hashable for the @cache'd consumers.
        return tuple(command)

    @cache  # noqa
    def consume_state_test_file(
        self,
        fixture_path: Path,
        command: Tuple[str, ...],
        debug_output_path: Optional[Path] = None,
    ) -> Tuple[List[Dict[str, Any]], str]:
        """
        Consume an entire state test file.

        The `evm statetest` will always execute all the tests contained in a
        file without the possibility of selecting a single test, so this
        function is cached in order to only call the command once and
        `consume_state_test` can simply select the result that was requested.
        """
        del fixture_path
        result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        if debug_output_path:
            self._consume_debug_dump(command, result, debug_output_path)
        if result.returncode != 0:
            raise Exception(
                f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}"
            )
        try:
            result_json = json.loads(result.stdout)
        except json.JSONDecodeError as e:
            raise Exception(
                f"Failed to parse JSON output on stdout from nethtest:\n{result.stdout}"
            ) from e
        if not isinstance(result_json, list):
            raise Exception(f"Unexpected result from evm statetest: {result_json}")
        return result_json, result.stderr

    def consume_state_test(
        self,
        command: Tuple[str, ...],
        fixture_path: Path,
        fixture_name: Optional[str] = None,
        debug_output_path: Optional[Path] = None,
    ) -> None:
        """
        Consume a single state test.

        Uses the cached result from `consume_state_test_file` in order to not
        call the command every time and select a single result from there.
        """
        file_results, stderr = self.consume_state_test_file(
            fixture_path=fixture_path,
            command=command,
            debug_output_path=debug_output_path,
        )
        if fixture_name:
            # TODO: this check is too fragile; extend for ethereum/tests?
            nethtest_suffix = "_d0g0v0_"
            assert all(
                test_result["name"].endswith(nethtest_suffix) for test_result in file_results
            ), (
                "consume direct with nethtest doesn't support the multi-data statetest format "
                "used in ethereum/tests (yet)"
            )
            # Match on the fixture's final path component with the nethtest
            # suffix stripped from the reported names.
            test_result = [
                test_result
                for test_result in file_results
                if test_result["name"].removesuffix(nethtest_suffix)
                == f"{fixture_name.split('/')[-1]}"
            ]
            assert len(test_result) < 2, f"Multiple test results for {fixture_name}"
            assert len(test_result) == 1, f"Test result for {fixture_name} missing"
            assert test_result[0]["pass"], (
                f"State test '{fixture_name}' failed, available stderr:\n {stderr}"
            )
        else:
            # No specific test requested: fail if any test in the file failed.
            if any(not test_result["pass"] for test_result in file_results):
                exception_text = "State test failed: \n" + "\n".join(
                    f"{test_result['name']}: " + test_result["error"]
                    for test_result in file_results
                    if not test_result["pass"]
                )
                raise Exception(exception_text)

    def consume_blockchain_test(
        self,
        command: Tuple[str, ...],
        fixture_path: Path,
        fixture_name: Optional[str] = None,
        debug_output_path: Optional[Path] = None,
    ) -> None:
        """Execute the fixture at `fixture_path` via `nethtest`."""
        del fixture_path
        del fixture_name
        result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        if debug_output_path:
            self._consume_debug_dump(command, result, debug_output_path)
        if result.returncode != 0:
            raise Exception(
                f"nethtest exited with non-zero exit code ({result.returncode}).\n"
                f"stdout:\n{result.stdout}\n"
                f"stderr:\n{result.stderr}\n"
                f"{' '.join(command)}"
            )

    @cache  # noqa
    def consume_eof_test_file(
        self,
        fixture_path: Path,
        command: Tuple[str, ...],
        debug_output_path: Optional[Path] = None,
    ) -> Tuple[Dict[Any, Any], str, str]:
        """Consume an entire EOF fixture file."""
        del fixture_path
        result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        # Parse per-test "PASS"/"FAIL" lines from nethtest's plain-text output.
        pattern = re.compile(r"^(test_.+?)\s+(PASS|FAIL)$", re.MULTILINE)
        test_results = {
            match.group(1): match.group(2) == "PASS"  # Convert "PASS" to True
            # and "FAIL" to False
            for match in pattern.finditer(result.stdout)
        }
        if debug_output_path:
            self._consume_debug_dump(command, result, debug_output_path)
        if result.returncode != 0:
            raise Exception(
                f"Unexpected exit code:\n{' '.join(command)}\n\n Error:\n{result.stderr}"
            )
        return test_results, result.stdout, result.stderr

    def consume_eof_test(
        self,
        command: Tuple[str, ...],
        fixture_path: Path,
        fixture_name: Optional[str],
        debug_output_path: Optional[Path],
    ) -> None:
        """Execute the EOF fixture at `fixture_path` via `nethtest`."""
        if not self.has_eof_support():
            pytest.skip("This version of nethtest does not support the `--eofTest` flag.")
        file_results, stdout, stderr = self.consume_eof_test_file(
            fixture_path=fixture_path,
            command=command,
            debug_output_path=debug_output_path,
        )
        assert fixture_name, "fixture_name is required for EOF tests"
        # Normalize the pytest node id to the name format nethtest reports.
        modified_fixture_name = fixture_name.split("::")[-1].replace("\\x", "/x")
        assert modified_fixture_name in file_results, (
            f"Test result for {fixture_name} missing, available stdout:\n{stdout}.\n"
            f"Parsed test results: {file_results}"
        )
        if stderr:
            available_stderr = f"Available stderr:\n{stderr}"
        else:
            available_stderr = "(No output available.)"
        assert file_results[modified_fixture_name], (
            f"EOF test '{fixture_name}' failed. {available_stderr}"
        )

    def consume_fixture(
        self,
        fixture_format: FixtureFormat,
        fixture_path: Path,
        fixture_name: Optional[str] = None,
        debug_output_path: Optional[Path] = None,
    ) -> None:
        """
        Execute the appropriate nethtest fixture consumer for the fixture at
        `fixture_path`.
        """
        command = self._build_command_with_options(
            fixture_format, fixture_path, fixture_name, debug_output_path
        )
        if fixture_format == BlockchainFixture:
            self.consume_blockchain_test(
                command=command,
                fixture_path=fixture_path,
                fixture_name=fixture_name,
                debug_output_path=debug_output_path,
            )
        elif fixture_format == StateFixture:
            self.consume_state_test(
                command=command,
                fixture_path=fixture_path,
                fixture_name=fixture_name,
                debug_output_path=debug_output_path,
            )
        elif fixture_format == EOFFixture:
            self.consume_eof_test(
                command=command,
                fixture_path=fixture_path,
                fixture_name=fixture_name,
                debug_output_path=debug_output_path,
            )
        else:
            raise Exception(
                f"Fixture format {fixture_format.format_name} not supported by {self.binary}"
            )
class NethermindExceptionMapper(ExceptionMapper):
    """Nethermind exception mapper."""

    # Exact error substrings emitted by Nethermind for each EEST exception.
    mapping_substring = {
        TransactionException.SENDER_NOT_EOA: "sender has deployed code",
        TransactionException.INTRINSIC_GAS_TOO_LOW: "intrinsic gas too low",
        TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: "intrinsic gas too low",
        TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "miner premium is negative",
        TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: (
            "InvalidMaxPriorityFeePerGas: Cannot be higher than maxFeePerGas"
        ),
        TransactionException.GAS_ALLOWANCE_EXCEEDED: "Block gas limit exceeded",
        TransactionException.NONCE_IS_MAX: "NonceTooHigh",
        TransactionException.INITCODE_SIZE_EXCEEDED: "max initcode size exceeded",
        TransactionException.NONCE_MISMATCH_TOO_LOW: "wrong transaction nonce",
        TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: (
            "InsufficientMaxFeePerBlobGas: Not enough to cover blob gas fee"
        ),
        TransactionException.TYPE_3_TX_PRE_FORK: (
            "InvalidTxType: Transaction type in Custom is not supported"
        ),
        TransactionException.TYPE_3_TX_ZERO_BLOBS: "blob transaction missing blob hashes",
        TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: (
            "InvalidBlobVersionedHashVersion: Blob version not supported"
        ),
        TransactionException.TYPE_3_TX_CONTRACT_CREATION: "blob transaction of type create",
        TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: (
            "MissingAuthorizationList: Must be set"
        ),
        TransactionException.TYPE_4_TX_CONTRACT_CREATION: (
            "NotAllowedCreateTransaction: To must be set"
        ),
        TransactionException.TYPE_4_TX_PRE_FORK: (
            "InvalidTxType: Transaction type in Custom is not supported"
        ),
        BlockException.INCORRECT_BLOB_GAS_USED: (
            "HeaderBlobGasMismatch: Blob gas in header does not match calculated"
        ),
        BlockException.INVALID_REQUESTS: "InvalidRequestsHash: Requests hash mismatch in block",
        BlockException.INVALID_GAS_USED_ABOVE_LIMIT: (
            "ExceededGasLimit: Gas used exceeds gas limit."
        ),
        BlockException.RLP_BLOCK_LIMIT_EXCEEDED: (
            "ExceededBlockSizeLimit: Exceeded block size limit"
        ),
        BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: (
            "DepositsInvalid: Invalid deposit event layout:"
        ),
    }
    # Regex matches for error strings that embed variable data (gas values,
    # hashes, counts, etc.).
    mapping_regex = {
        TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: (
            r"insufficient sender balance|insufficient MaxFeePerGas for sender balance"
        ),
        TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: r"Transaction \d+ is not valid",
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            r"BlockBlobGasExceeded: A block cannot have more than \d+ blob gas, blobs count \d+, "
            r"blobs gas used: \d+"
        ),
        TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: (
            r"BlobTxGasLimitExceeded: Transaction's totalDataGas=\d+ "
            r"exceeded MaxBlobGas per transaction=\d+"
        ),
        TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: (
            r"TxGasLimitCapExceeded: Gas limit \d+ \w+ cap of \d+\.?"
        ),
        BlockException.INCORRECT_EXCESS_BLOB_GAS: (
            r"HeaderExcessBlobGasMismatch: Excess blob gas in header does not match calculated"
            r"|Overflow in excess blob gas"
        ),
        BlockException.INVALID_BLOCK_HASH: (
            r"Invalid block hash 0x[0-9a-f]+ does not match calculated hash 0x[0-9a-f]+"
        ),
        BlockException.SYSTEM_CONTRACT_EMPTY: (
            r"(Withdrawals|Consolidations)Empty: Contract is not deployed\."
        ),
        BlockException.SYSTEM_CONTRACT_CALL_FAILED: (
            r"(Withdrawals|Consolidations)Failed: Contract execution failed\."
        ),
    }
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/clis/reth.py | src/ethereum_clis/clis/reth.py | """Reth execution client transition tool."""
from ethereum_test_exceptions import BlockException, ExceptionMapper, TransactionException
class RethExceptionMapper(ExceptionMapper):
    """
    Reth exception mapper.

    Translates EEST ``TransactionException``/``BlockException`` values into the
    error strings that reth emits, so test failures can be matched against the
    client's actual output.
    """

    # Fixed substrings expected verbatim somewhere in reth's error message.
    mapping_substring = {
        TransactionException.SENDER_NOT_EOA: (
            "reject transactions from senders with deployed code"
        ),
        TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: "lack of funds",
        TransactionException.INITCODE_SIZE_EXCEEDED: "create initcode size limit",
        TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "gas price is less than basefee",
        TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: (
            "priority fee is greater than max fee"
        ),
        TransactionException.GASLIMIT_PRICE_PRODUCT_OVERFLOW: "overflow",
        # reth reports both type-3 and type-4 contract creation as an RLP
        # decoding problem ("unexpected length").
        TransactionException.TYPE_3_TX_CONTRACT_CREATION: "unexpected length",
        TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: "unexpected list",
        TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: "blob version not supported",
        TransactionException.TYPE_3_TX_ZERO_BLOBS: "empty blobs",
        TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: "empty authorization list",
        TransactionException.TYPE_4_TX_CONTRACT_CREATION: "unexpected length",
        TransactionException.TYPE_4_TX_PRE_FORK: (
            "eip 7702 transactions present in pre-prague payload"
        ),
        BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: (
            "failed to decode deposit requests from receipts"
        ),
        BlockException.INVALID_REQUESTS: "mismatched block requests hash",
        BlockException.INVALID_RECEIPTS_ROOT: "receipt root mismatch",
        BlockException.INVALID_STATE_ROOT: "mismatched block state root",
        BlockException.INVALID_BLOCK_HASH: "block hash mismatch",
        BlockException.INVALID_GAS_USED: "block gas used mismatch",
        BlockException.RLP_BLOCK_LIMIT_EXCEEDED: "block is too large: ",
    }
    # Regex patterns for messages that embed variable values (amounts, limits)
    # or have multiple alternative phrasings.
    mapping_regex = {
        TransactionException.NONCE_MISMATCH_TOO_LOW: r"nonce \d+ too low, expected \d+",
        TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: (
            r"blob gas price \(\d+\) is greater than max fee per blob gas \(\d+\)"
        ),
        TransactionException.INTRINSIC_GAS_TOO_LOW: (
            r"call gas cost \(\d+\) exceeds the gas limit \(\d+\)"
        ),
        TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: (
            r"gas floor \(\d+\) exceeds the gas limit \(\d+\)"
        ),
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            r"blob gas used \d+ exceeds maximum allowance \d+"
        ),
        TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: r"too many blobs, have \d+, max \d+",
        TransactionException.TYPE_3_TX_PRE_FORK: (
            r"blob transactions present in pre-cancun payload|empty blobs"
        ),
        TransactionException.GAS_ALLOWANCE_EXCEEDED: (
            r"transaction gas limit \w+ is more than blocks available gas \w+"
        ),
        TransactionException.GAS_LIMIT_EXCEEDS_MAXIMUM: (
            r"transaction gas limit.*is greater than the cap"
        ),
        BlockException.SYSTEM_CONTRACT_CALL_FAILED: r"failed to apply .* requests contract call",
        BlockException.INCORRECT_BLOB_GAS_USED: (
            r"blob gas used mismatch|blob gas used \d+ is not a multiple of blob gas per blob"
        ),
        BlockException.INCORRECT_EXCESS_BLOB_GAS: (
            r"excess blob gas \d+ is not a multiple of blob gas per blob|invalid excess blob gas"
        ),
        BlockException.INVALID_GAS_USED_ABOVE_LIMIT: (
            r"block used gas \(\d+\) is greater than gas limit \(\d+\)"
        ),
    }
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/clis/ethrex.py | src/ethereum_clis/clis/ethrex.py | """Ethrex execution client transition tool."""
from ethereum_test_exceptions import BlockException, ExceptionMapper, TransactionException
class EthrexExceptionMapper(ExceptionMapper):
    """
    Ethrex exception mapper.

    Maps EEST ``TransactionException``/``BlockException`` values to the error
    strings produced by ethrex.  Several regex entries carry two alternatives
    (joined with ``|``) because ethrex has both legacy and current phrasings
    for the same failure.
    """

    # Fixed substrings expected verbatim in ethrex's error output.
    mapping_substring = {
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            "Exceeded MAX_BLOB_GAS_PER_BLOCK"
        ),
        BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: ("Invalid deposit request layout"),
        BlockException.INVALID_REQUESTS: (
            "Requests hash does not match the one in the header after executing"
        ),
        BlockException.INVALID_RECEIPTS_ROOT: (
            "Receipts Root does not match the one in the header after executing"
        ),
        BlockException.INVALID_STATE_ROOT: (
            "World State Root does not match the one in the header after executing"
        ),
        BlockException.INVALID_GAS_USED: "Gas used doesn't match value in header",
        BlockException.INCORRECT_BLOB_GAS_USED: "Blob gas used doesn't match value in header",
    }
    # Regex patterns; `(?i)` prefixes make individual patterns case-insensitive.
    mapping_regex = {
        TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: (
            r"(?i)priority fee.* is greater than max fee.*"
        ),
        TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: r"(?i)empty authorization list",
        TransactionException.SENDER_NOT_EOA: (
            r"reject transactions from senders with deployed code|"
            r"Sender account .* shouldn't be a contract"
        ),
        TransactionException.NONCE_MISMATCH_TOO_LOW: r"nonce \d+ too low, expected \d+|"
        r"Nonce mismatch.*",
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            r"blob gas used \d+ exceeds maximum allowance \d+"
        ),
        TransactionException.TYPE_3_TX_ZERO_BLOBS: (
            r"blob transactions present in pre-cancun payload|empty blobs|"
            r"Type 3 transaction without blobs"
        ),
        TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: (
            r"blob version not supported|Invalid blob versioned hash"
        ),
        TransactionException.TYPE_3_TX_PRE_FORK: (
            r"blob versioned hashes not supported|"
            r"Type 3 transactions are not supported before the Cancun fork"
        ),
        # A type 4 Transaction without a recipient won't even reach the EVM, we
        # can't decode it.
        TransactionException.TYPE_4_TX_CONTRACT_CREATION: (
            r"unexpected length|Contract creation in type 4 transaction|"
            r"Error decoding field 'to' of type primitive_types::H160: InvalidLength"
        ),
        TransactionException.TYPE_3_TX_CONTRACT_CREATION: (
            r"unexpected length|Contract creation in type 3 transaction|"
            r"Error decoding field 'to' of type primitive_types::H160: InvalidLength"
        ),
        TransactionException.TYPE_4_TX_PRE_FORK: (
            r"eip 7702 transactions present in pre-prague payload|"
            r"Type 4 transactions are not supported before the Prague fork"
        ),
        TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: (
            r"lack of funds \(\d+\) for max fee \(\d+\)|Insufficient account funds"
        ),
        TransactionException.INTRINSIC_GAS_TOO_LOW: (
            r"gas floor exceeds the gas limit|call gas cost exceeds the gas limit|"
            r"Transaction gas limit lower than the minimum gas cost to execute the transaction"
        ),
        TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: (
            r"Transaction gas limit lower than the gas cost floor for calldata tokens"
        ),
        TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: (
            r"gas price is less than basefee|Insufficient max fee per gas"
        ),
        TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: (
            r"blob gas price is greater than max fee per blob gas|"
            r"Insufficient max fee per blob gas.*"
        ),
        TransactionException.INITCODE_SIZE_EXCEEDED: (
            r"create initcode size limit|Initcode size exceeded.*"
        ),
        TransactionException.NONCE_IS_MAX: (r"Nonce is max"),
        TransactionException.GAS_ALLOWANCE_EXCEEDED: (r"Gas allowance exceeded.*"),
        TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: (r"Blob count exceeded.*"),
        BlockException.SYSTEM_CONTRACT_CALL_FAILED: (r"System call failed.*"),
        BlockException.SYSTEM_CONTRACT_EMPTY: (r"System contract:.* has no code after deployment"),
        BlockException.INCORRECT_BLOB_GAS_USED: (r"Blob gas used doesn't match value in header"),
        BlockException.RLP_STRUCTURES_ENCODING: (r"Error decoding field '\D+' of type \w+.*"),
        BlockException.INCORRECT_EXCESS_BLOB_GAS: (r".* Excess blob gas is incorrect"),
        BlockException.INVALID_BLOCK_HASH: (r"Invalid block hash. Expected \w+, got \w+"),
    }
}
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
ethereum/execution-spec-tests | https://github.com/ethereum/execution-spec-tests/blob/88e9fb8f10ed89805aa3110d0a2cd5dcadc19689/src/ethereum_clis/clis/ethereumjs.py | src/ethereum_clis/clis/ethereumjs.py | """EthereumJS Transition tool interface."""
import re
from pathlib import Path
from typing import ClassVar, Dict, Optional
from ethereum_test_exceptions import (
BlockException,
ExceptionBase,
ExceptionMapper,
TransactionException,
)
from ethereum_test_forks import Fork
from ..transition_tool import TransitionTool
class EthereumJSTransitionTool(TransitionTool):
    """Wrapper around the EthereumJS `t8n` command-line transition tool."""

    default_binary = Path("ethereumjs-t8ntool.sh")
    detect_binary_pattern = re.compile(r"^ethereumjs t8n\b")
    version_flag: str = "--version"
    t8n_use_stream = False

    binary: Path
    cached_version: Optional[str] = None
    trace: bool

    def __init__(self, *, binary: Optional[Path] = None, trace: bool = False):
        """Set up the wrapper, wiring in the EthereumJS exception mapper."""
        super().__init__(
            exception_mapper=EthereumJSExceptionMapper(),
            binary=binary,
            trace=trace,
        )

    def is_fork_supported(self, fork: Fork) -> bool:
        """
        Report whether *fork* is supported by the tool.

        EthereumJS-t8n exposes no way to query its supported forks, so every
        fork is optimistically reported as supported.
        """
        del fork  # unused: support cannot be queried from the tool
        return True
class EthereumJSExceptionMapper(ExceptionMapper):
    """
    Translate between EEST exceptions and error strings returned by EthereumJS.
    """

    # Fixed substrings expected verbatim somewhere in EthereumJS's error output.
    mapping_substring: ClassVar[Dict[ExceptionBase, str]] = {
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            "would exceed maximum allowance"
        ),
        TransactionException.INSUFFICIENT_MAX_FEE_PER_BLOB_GAS: (
            "Invalid 4844 transactions: undefined"
        ),
        TransactionException.GASLIMIT_PRICE_PRODUCT_OVERFLOW: (
            "gas limit * gasPrice cannot exceed MAX_INTEGER"
        ),
        TransactionException.INSUFFICIENT_MAX_FEE_PER_GAS: "tx unable to pay base fee",
        TransactionException.NONCE_IS_MAX: "nonce cannot equal or exceed",
        TransactionException.PRIORITY_GREATER_THAN_MAX_FEE_PER_GAS: (
            "maxFeePerGas cannot be less than maxPriorityFeePerGas"
        ),
        TransactionException.TYPE_3_TX_INVALID_BLOB_VERSIONED_HASH: (
            "versioned hash does not start with KZG commitment version"
        ),
        # This message is the same as TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED
        TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: "exceed maximum allowance",
        TransactionException.TYPE_3_TX_ZERO_BLOBS: "tx should contain at least one blob",
        TransactionException.TYPE_3_TX_WITH_FULL_BLOBS: "Invalid EIP-4844 transaction",
        TransactionException.TYPE_3_TX_CONTRACT_CREATION: (
            'tx should have a "to" field and cannot be used to create contracts'
        ),
        TransactionException.TYPE_4_EMPTY_AUTHORIZATION_LIST: (
            "Invalid EIP-7702 transaction: authorization list is empty"
        ),
        # EthereumJS reports both "too low" and "below floor" with one message.
        TransactionException.INTRINSIC_GAS_TOO_LOW: "is lower than the minimum gas limit of",
        TransactionException.INTRINSIC_GAS_BELOW_FLOOR_GAS_COST: (
            "is lower than the minimum gas limit of"
        ),
        TransactionException.INITCODE_SIZE_EXCEEDED: (
            "the initcode size of this transaction is too large"
        ),
        TransactionException.TYPE_4_TX_CONTRACT_CREATION: (
            'tx should have a "to" field and cannot be used to create contracts'
        ),
        TransactionException.INSUFFICIENT_ACCOUNT_FUNDS: (
            "sender doesn't have enough funds to send tx"
        ),
        TransactionException.NONCE_MISMATCH_TOO_LOW: "the tx doesn't have the correct nonce",
        TransactionException.GAS_ALLOWANCE_EXCEEDED: "tx has a higher gas limit than the block",
        BlockException.INCORRECT_EXCESS_BLOB_GAS: "Invalid 4844 transactions",
        BlockException.INVALID_RECEIPTS_ROOT: "invalid receipttrie",
        BlockException.INVALID_DEPOSIT_EVENT_LAYOUT: (
            "Error verifying block while running: error: number exceeds 53 bits"
        ),
    }
    # Regex patterns for messages embedding variable values or with multiple
    # alternative phrasings (joined with `|`).
    mapping_regex: ClassVar[Dict[ExceptionBase, str]] = {
        TransactionException.TYPE_3_TX_MAX_BLOB_GAS_ALLOWANCE_EXCEEDED: (
            r"tx causes total blob gas of \d+ to exceed maximum blob gas per block of \d+|"
            r"tx can contain at most \d+ blobs"
        ),
        TransactionException.TYPE_3_TX_BLOB_COUNT_EXCEEDED: (
            r"tx causes total blob gas of \d+ to exceed maximum blob gas per block of \d+|"
            r"tx can contain at most \d+ blobs"
        ),
        TransactionException.TYPE_3_TX_PRE_FORK: (
            r"blob tx used but field env.ExcessBlobGas missing|EIP-4844 not enabled on Common"
        ),
        BlockException.BLOB_GAS_USED_ABOVE_LIMIT: r"invalid blobGasUsed expected=\d+ actual=\d+",
        BlockException.INCORRECT_BLOB_GAS_USED: r"invalid blobGasUsed expected=\d+ actual=\d+",
        BlockException.INVALID_BLOCK_HASH: (
            r"Invalid blockHash, expected: 0x[0-9a-f]+, received: 0x[0-9a-f]+"
        ),
        BlockException.INVALID_REQUESTS: r"Unknown request identifier|invalid requestshash",
        BlockException.INVALID_GAS_USED_ABOVE_LIMIT: (
            r"Invalid block: too much gas used. Used: \d+, gas limit: \d+"
        ),
    }
| python | MIT | 88e9fb8f10ed89805aa3110d0a2cd5dcadc19689 | 2026-01-05T06:50:32.790998Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.