File size: 1,153 Bytes
797f8cc |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 |
import torch
import torch.nn as nn
from transformers import BertModel
class PersonaAssigner(nn.Module):
    """Two-layer MLP that maps an input feature vector to persona logits.

    Architecture: Linear(input_dim -> hidden_dim) -> ReLU ->
    Linear(hidden_dim -> output_dim). No activation on the output,
    so the result is raw logits/scores.
    """

    def __init__(self, input_dim, hidden_dim, output_dim):
        super().__init__()
        self.fc1 = nn.Linear(input_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, output_dim)

    def forward(self, x):
        # Project to the hidden space, gate with ReLU, then score.
        hidden = self.fc1(x).relu()
        return self.fc2(hidden)
class PreferencePredictor(nn.Module):
    """Two-layer MLP that predicts preference-class logits from features.

    Architecture: Linear(input_dim -> hidden_dim) -> ReLU ->
    Linear(hidden_dim -> num_classes). Output is raw logits (no softmax).

    Args:
        input_dim: Size of the input feature vector.
        hidden_dim: Width of the hidden layer. Defaults to 256, the
            value previously hard-coded.
        num_classes: Number of output classes. Defaults to 3, the
            value previously hard-coded.
    """

    def __init__(self, input_dim, hidden_dim=256, num_classes=3):
        super().__init__()
        self.fc1 = nn.Linear(input_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, num_classes)

    def forward(self, x):
        x = torch.relu(self.fc1(x))
        return self.fc2(x)
class BERTEncoder(nn.Module):
    """Wraps a pretrained BERT model and pools token embeddings into one
    fixed-size sentence embedding per input sequence.

    Args:
        model_name: Hugging Face model identifier passed to
            ``BertModel.from_pretrained``.
    """

    def __init__(self, model_name='bert-base-uncased'):
        super().__init__()
        self.bert = BertModel.from_pretrained(model_name)

    def forward(self, input_ids, attention_mask, token_type_ids=None):
        """Return a masked mean-pooled embedding of shape (batch, hidden).

        The previous implementation averaged over ALL positions of
        ``last_hidden_state``, so PAD tokens diluted the embedding for
        padded batches. We now weight the mean by ``attention_mask`` so
        only real tokens contribute.
        """
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask, token_type_ids=token_type_ids)
        hidden = outputs.last_hidden_state  # (batch, seq, hidden)
        mask = attention_mask.unsqueeze(-1).to(hidden.dtype)  # (batch, seq, 1)
        summed = (hidden * mask).sum(dim=1)
        # Clamp so an all-padding row cannot divide by zero.
        counts = mask.sum(dim=1).clamp(min=1e-9)
        return summed / counts