from types import SimpleNamespace

import torch
import torch.nn as nn
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file
|
|
class _YesNoNet(nn.Module):
    """Tiny char-level classifier: mean-pooled byte embeddings -> 2 logits (no, yes)."""

    def __init__(self):
        super().__init__()
        # Attribute names must stay `embedding`/`classifier` so the published
        # checkpoint's state-dict keys still match.
        self.embedding = nn.Embedding(256, 1)
        self.classifier = nn.Linear(1, 2, bias=False)

    def forward(self, x):
        # Mean-pool the per-character embeddings over the sequence axis,
        # then project to the two class logits.
        return self.classifier(self.embedding(x).float().mean(0))


class OpenAI:
    """Drop-in mimic of the OpenAI chat client backed by a tiny local model.

    Exposes ``client.chat.completions.create(messages=[...])`` and returns an
    object whose ``.choices[0].message.content`` is ``"Yes"`` or ``"No"``,
    as decided by a character-level classifier downloaded from the
    ``faunix/YEN`` Hugging Face repo.

    Args:
        token: Optional Hugging Face access token forwarded to
            ``hf_hub_download`` (needed only for gated/private access).
    """

    def __init__(self, token=None):
        # Prefer GPU when one is available.
        self.d = "cuda" if torch.cuda.is_available() else "cpu"
        self.m = _YesNoNet().to(self.d)
        weights = load_file(hf_hub_download("faunix/YEN", "model.safetensors", token=token))
        self.m.load_state_dict(weights)
        self.m.eval()  # inference only; explicit even though the net has no dropout/batchnorm
        # Mirror the OpenAI SDK surface: client.chat.completions.create(...).
        self.chat = SimpleNamespace(completions=SimpleNamespace(create=self._c))

    def _c(self, **k):
        """Classify the last message's content and wrap it OpenAI-response style.

        Expects ``k['messages']`` to be a list of dicts with a ``'content'``
        string, like the OpenAI chat API. Returns an object supporting
        ``.choices[0].message.content`` with value ``"Yes"`` or ``"No"``.
        """
        text = k['messages'][-1]['content']
        t = torch.tensor([ord(c) % 256 for c in text], device=self.d)
        if t.numel() == 0:
            # Guard: mean() over zero elements yields NaN logits (and a
            # warning); an empty prompt deterministically answers "No".
            res = "No"
        else:
            with torch.no_grad():
                out = self.m(t)
            res = "Yes" if out[1] > out[0] else "No"
        return SimpleNamespace(
            choices=[SimpleNamespace(message=SimpleNamespace(content=res))]
        )