soltaniali committed
Commit 385eeb3 · verified · 1 Parent(s): 9631f8b

Upload README.md with huggingface_hub

Files changed (1): README.md added (+139 −0)
# IDSF_BERT

This is a BERT-based model for Joint Intent Detection and Slot Filling (IDSF).

## Model Description
- **Model Type:** BERT-based Joint Intent Detection and Slot Filling
- **Custom Architecture:** BertIDSF with intent and slot classification heads (an illustration of the joint output follows this list)
- **Language:** English

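To make the two heads concrete: one forward pass yields a single intent label for the whole utterance and one slot tag per token. The snippet below only illustrates that output shape; the intent and slot names are hypothetical placeholders, and the real label inventories come from the `inte2.json` and `dict2.json` files used in the Usage section.

```python
# Shape of a joint IDSF prediction (labels below are hypothetical placeholders).
example = {
    "text": "I want to transfer 200 dollars to my savings account",
    "intent": "transfer_money",  # one label for the whole utterance
    "slots": ["O", "O", "O", "O", "B-amount", "I-amount",
              "O", "O", "B-account_type", "I-account_type"],  # one tag per word
}
```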
## Usage

```python
import torch
import json
from transformers import AutoTokenizer, BertConfig
from transformers.models.bert.modeling_bert import BertPreTrainedModel, BertModel
from torch import nn

# First, define the model architecture.
class IntentClassifier(nn.Module):
    """Linear head over the [CLS] representation for utterance-level intents."""
    def __init__(self, input_dim, num_intent_labels, dropout_rate=0.):
        super(IntentClassifier, self).__init__()
        self.dropout = nn.Dropout(dropout_rate)
        self.linear = nn.Linear(input_dim, num_intent_labels)

    def forward(self, x):
        x = self.dropout(x)
        return self.linear(x)


class SlotClassifier(nn.Module):
    """Linear head over each token representation for slot tags."""
    def __init__(self, input_dim, num_slot_labels, dropout_rate=0.):
        super(SlotClassifier, self).__init__()
        self.dropout = nn.Dropout(dropout_rate)
        self.linear = nn.Linear(input_dim, num_slot_labels)

    def forward(self, x):
        x = self.dropout(x)
        return self.linear(x)


class BertIDSF(BertPreTrainedModel):
    def __init__(self, config, intent_label_lst, slot_label_lst, n_layers=1):
        super().__init__(config)
        self.num_intent_labels = len(intent_label_lst)
        self.num_slot_labels = len(slot_label_lst)
        self.bert = BertModel(config=config)

        # Store the label dictionaries in the config for later use
        self.config.dict2 = {str(idx + 1): label for idx, label in enumerate(slot_label_lst)}
        self.config.inte2 = {str(idx + 1): label for idx, label in enumerate(intent_label_lst)}

        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.intent_classifier = IntentClassifier(config.hidden_size, self.num_intent_labels)
        self.slot_classifier = SlotClassifier(config.hidden_size, self.num_slot_labels)

    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        labels=None,       # slot labels, one per token
        intents=None,      # intent label, one per utterance
        output_attentions=True,
        lens=None,
        device=None
    ):
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=True
        )

        sequence_output = outputs[0]
        sequence_output = self.dropout(sequence_output)

        # Intent logits from the [CLS] token, slot logits from every token
        intent_logits = self.intent_classifier(sequence_output[:, 0, :])
        slot_logits = self.slot_classifier(sequence_output)

        total_loss = 0

        # Intent classification loss
        if intents is not None:
            intent_loss_fct = nn.CrossEntropyLoss()
            intent_loss = intent_loss_fct(intent_logits.view(-1, self.num_intent_labels), intents.view(-1))
            total_loss += 0.5 * intent_loss

        # Slot filling loss
        if labels is not None:
            slot_loss_fct = nn.CrossEntropyLoss(ignore_index=0)
            # Only keep the active (non-padding) positions in the loss
            if attention_mask is not None:
                active_loss = attention_mask.view(-1) == 1
                active_logits = slot_logits.view(-1, self.num_slot_labels)[active_loss]
                active_labels = labels.view(-1)[active_loss]
                slot_loss = slot_loss_fct(active_logits, active_labels)
            else:
                slot_loss = slot_loss_fct(slot_logits.view(-1, self.num_slot_labels), labels.view(-1))
            total_loss += 0.5 * slot_loss

        outputs = ((intent_logits, slot_logits),) + outputs[2:]  # add hidden states and attentions if they are here
        outputs = (total_loss,) + outputs

        return outputs  # (loss), (intent_logits, slot_logits), (hidden_states), (attentions)


# Now load and use the model
model_path = "soltaniali/IDSF_BERT"

# Load the label dictionaries from JSON files
with open('dict2.json', 'r') as f:
    dict2 = json.load(f)   # slot labels
with open('inte2.json', 'r') as f:
    inte2 = json.load(f)   # intent labels

# Initialize tokenizer and model
tokenizer = AutoTokenizer.from_pretrained(model_path)
config = BertConfig.from_pretrained(model_path)
model = BertIDSF.from_pretrained(
    model_path,
    config=config,
    slot_label_lst=list(dict2.values()),
    intent_label_lst=list(inte2.values())
)

# Process a sentence
sentence = "I want to transfer 200 dollars to my savings account"
# ... process with your IDSFService class (a minimal alternative is sketched below)
```
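The snippet above stops where the author's `IDSFService` class (not shown in this README) would take over. As a rough substitute, here is a minimal inference sketch that continues from the variables defined above. It assumes a fast tokenizer (so that `word_ids()` is available), that the values of `inte2` and `dict2` are ordered exactly like the label lists passed to the model, and that each word's slot tag is read from its first sub-token; treat it as an illustration rather than the author's intended pipeline.

```python
# Minimal inference sketch (assumptions: fast tokenizer, label order matches the
# lists passed to BertIDSF, first sub-token of each word carries its slot tag).
import torch

model.eval()
words = sentence.split()
encoding = tokenizer(words, is_split_into_words=True, return_tensors="pt")

with torch.no_grad():
    outputs = model(input_ids=encoding["input_ids"],
                    attention_mask=encoding["attention_mask"])

# outputs[0] is the (zero) loss when no labels are passed; outputs[1] holds the logits.
intent_logits, slot_logits = outputs[1]

# Utterance-level intent: argmax over the [CLS] logits.
intent_id = intent_logits.argmax(dim=-1).item()
intent = list(inte2.values())[intent_id]

# Token-level slots: keep the prediction of each word's first sub-token.
slot_ids = slot_logits.argmax(dim=-1)[0].tolist()
word_ids = encoding.word_ids(batch_index=0)
slots, seen = [], set()
for position, word_id in enumerate(word_ids):
    if word_id is not None and word_id not in seen:
        seen.add(word_id)
        slots.append(list(dict2.values())[slot_ids[position]])

print(intent)
print(list(zip(words, slots)))
```

The first-sub-token heuristic is a common convention for word-level tagging with BERT tokenizers; the actual `IDSFService` may align or post-process predictions differently.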

## Important Note
This model uses a custom architecture (BertIDSF) and requires both the class definition above and the label dictionaries (`dict2.json`, `inte2.json`) to be loaded correctly.
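
If you are not working from a local clone, the two dictionary files can be fetched from the Hub first. Below is a minimal sketch using `huggingface_hub.hf_hub_download`; it assumes `dict2.json` and `inte2.json` are stored at the root of the `soltaniali/IDSF_BERT` repository, which the Usage snippet implies but this README does not state explicitly.

```python
# Sketch: fetch the label dictionaries from the Hub before loading the model.
# Assumes dict2.json and inte2.json exist at the root of soltaniali/IDSF_BERT.
import json
from huggingface_hub import hf_hub_download

repo_id = "soltaniali/IDSF_BERT"
dict2_path = hf_hub_download(repo_id=repo_id, filename="dict2.json")
inte2_path = hf_hub_download(repo_id=repo_id, filename="inte2.json")

with open(dict2_path, "r") as f:
    dict2 = json.load(f)   # slot label dictionary
with open(inte2_path, "r") as f:
    inte2 = json.load(f)   # intent label dictionary
```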