---
tags:
- pytorch
- safetensors
license: mit
---
# dm_qwen4b_emulator

An MLP with two hidden layers (LayerNorm + ReLU after each).

## Config

- `input_dim`: 6
- `hidden_dim`: 256
- `output_dim`: 3

## Usage
```python
import torch
import torch.nn as nn
from safetensors.torch import load_file
from huggingface_hub import hf_hub_download
class MLP(nn.Module):
    """Feed-forward network: two hidden blocks (Linear -> LayerNorm -> ReLU)
    followed by a linear output projection.
    """

    def __init__(self, input_dim, hidden_dim, output_dim):
        super().__init__()
        blocks = [
            nn.Linear(input_dim, hidden_dim),
            nn.LayerNorm(hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, hidden_dim),
            nn.LayerNorm(hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, output_dim),
        ]
        self.mlp = nn.Sequential(*blocks)

    def forward(self, x):
        """Run the input tensor through the sequential stack."""
        return self.mlp(x)
# Fetch the checkpoint from the Hub, load it, and run one forward pass.
weights_path = hf_hub_download("chewwt/dm_qwen4b_emulator", "model.safetensors")
model = MLP(input_dim=6, hidden_dim=256, output_dim=3)
model.load_state_dict(load_file(weights_path))
model.eval()
with torch.no_grad():
    out = model(torch.randn(1, 6))
```