tal11 committed on
Commit
9ffecf6
·
verified ·
1 Parent(s): 5427f36

Upload 2 files

Browse files
Files changed (2) hide show
  1. config.json +6 -0
  2. modeling.py +54 -0
config.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "my-warning-model",
3
+ "architectures": ["SimpleModel"],
4
+ "model_type": "simple-warning-model",
5
+ "hidden_size": 8
6
+ }
modeling.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # modeling.py
2
+ # Safe demonstration: prints/warns when executed, but DOES NOT fetch or execute remote code.
3
+
4
+ import warnings
5
+ import sys
6
+
7
+ # 1) Immediate visible print
8
+ print("\n" + "="*70)
9
+ print("⚠️ WARNING: You are loading a model that declares it MAY execute remote code.")
10
+ print("This repository is a demonstration and DOES NOT execute remote code.")
11
+ print("Before setting `trust_remote_code=True` review the model files manually.")
12
+ print("="*70 + "\n")
13
+
14
+ # 2) Also raise a Python warning (visible in many environments)
15
+ warnings.warn(
16
+ "This model includes custom Python code and may execute arbitrary logic when loaded. "
17
+ "Only load it with trust_remote_code=True after inspecting the repository.",
18
+ UserWarning,
19
+ )
20
+
21
+ # 3) Minimal HF-compatible model implementation (harmless).
22
+ from transformers import PreTrainedModel, PretrainedConfig
23
+
class SimpleWarningConfig(PretrainedConfig):
    """Configuration object for ``SimpleModel``.

    Adds a single ``hidden_size`` hyper-parameter on top of the standard
    ``PretrainedConfig`` fields.
    """

    # Must match the "model_type" value declared in config.json.
    model_type = "simple-warning-model"

    def __init__(self, hidden_size=8, **kwargs):
        """Initialize the config.

        Args:
            hidden_size: Width of the demo linear layer (default 8).
            **kwargs: Forwarded unchanged to ``PretrainedConfig``.
        """
        super().__init__(**kwargs)
        self.hidden_size = hidden_size
30
+
class SimpleModel(PreTrainedModel):
    """Minimal, harmless demonstration model for the HF ``transformers`` API.

    Holds one dummy linear layer (when torch is installed) and a forward pass
    that returns zeros of shape ``(1, hidden_size)``. It performs no real
    computation; it exists only so the repository is loadable.
    """

    config_class = SimpleWarningConfig
    base_model_prefix = "simple_model"

    def __init__(self, config: SimpleWarningConfig):
        """Build the model.

        Args:
            config: A ``SimpleWarningConfig`` providing ``hidden_size``.
        """
        super().__init__(config)
        # Keep internals minimal and harmless. The torch import is guarded so
        # the module stays importable even without torch installed.
        try:
            import torch.nn as nn

            self.dummy = nn.Linear(config.hidden_size, config.hidden_size)
        except ImportError:
            # Narrowed from a broad `except Exception`: only a missing torch
            # should be tolerated here — any other failure (e.g. a bad
            # hidden_size) must surface to the caller instead of being hidden.
            self.dummy = None

    def forward(self, *args, **kwargs):
        """Harmless placeholder forward pass.

        Ignores all inputs. Returns a zero tensor of shape
        ``(1, config.hidden_size)``, or a plain nested list of zeros when
        torch is unavailable.
        """
        try:
            import torch
        except ImportError:
            # torch missing entirely — return a plain Python fallback.
            return [[0.0] * self.config.hidden_size]
        # Narrowed from a broad `except Exception` around this whole body:
        # real errors in the tensor ops below should propagate, not be
        # silently replaced by the fallback value.
        if self.dummy is None:
            return torch.zeros(1, self.config.hidden_size)
        return self.dummy(torch.zeros(1, self.config.hidden_size))