Eli181927 committed on
Commit
ef7bbab
·
verified ·
1 Parent(s): b3bed9e

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -11
app.py CHANGED
@@ -25,7 +25,7 @@ class ProductionTokenizer:
25
  self.vocab_size = len(self.vocab)
26
  else:
27
  # Build vocabulary from the target sentence
28
- target_sentence = "This model creates a super relationships between the words to predict what word"
29
  words = target_sentence.lower().split()
30
 
31
  # Create vocabulary
@@ -78,7 +78,7 @@ class ProductionMLM(nn.Module):
78
  # Global variables for the model
79
  model = None
80
  tokenizer = None
81
- TARGET_SENTENCE = "This model creates a super relationships between the words to predict what word"
82
 
83
  def load_model():
84
  """Load the trained production model."""
@@ -86,7 +86,7 @@ def load_model():
86
 
87
  try:
88
  # Try to load from saved model
89
- checkpoint_path = 'encoder_transformer/mlm_production/production_model.pt'
90
  if os.path.exists(checkpoint_path):
91
  checkpoint = torch.load(checkpoint_path, map_location='cpu')
92
 
@@ -178,7 +178,7 @@ def create_interface():
178
  # 🔮 Production MLM Word Prediction
179
 
180
  This model learns relationships between words in the sentence:
181
- > **"This model creates a super relationships between the words to predict what word"**
182
 
183
  **How it works:**
184
  1. Mask any word in the sentence by replacing it with `[MASK]`
@@ -200,9 +200,9 @@ def create_interface():
200
  gr.Markdown("""
201
  **Word positions in the target sentence:**
202
  ```
203
- 0: This 1: model 2: creates 3: a 4: super
204
- 5: relationships 6: between 7: the 8: words 9: to
205
- 10: predict 11: what 12: word
206
  ```
207
  """)
208
 
@@ -247,10 +247,10 @@ def create_interface():
247
  gr.Markdown("### 💡 Example Predictions")
248
 
249
  examples = [
250
- ["This model [MASK] a super relationships between the words to predict what word", 2],
251
- ["This model creates [MASK] super relationships between the words to predict what word", 3],
252
- ["This model creates a super [MASK] between the words to predict what word", 5],
253
- ["This model creates a super relationships [MASK] the words to predict what word", 6],
254
  ]
255
 
256
  gr.Examples(
 
25
  self.vocab_size = len(self.vocab)
26
  else:
27
  # Build vocabulary from the target sentence
28
+ target_sentence = "This model create relationships between the words to learn what word is missing!"
29
  words = target_sentence.lower().split()
30
 
31
  # Create vocabulary
 
78
  # Global variables for the model
79
  model = None
80
  tokenizer = None
81
+ TARGET_SENTENCE = "This model create relationships between the words to learn what word is missing!"
82
 
83
  def load_model():
84
  """Load the trained production model."""
 
86
 
87
  try:
88
  # Try to load from saved model
89
+ checkpoint_path = 'encoder_transformer/mlm/models/production_model.pt'
90
  if os.path.exists(checkpoint_path):
91
  checkpoint = torch.load(checkpoint_path, map_location='cpu')
92
 
 
178
  # 🔮 Production MLM Word Prediction
179
 
180
  This model learns relationships between words in the sentence:
181
+ > **"This model create relationships between the words to learn what word is missing!"**
182
 
183
  **How it works:**
184
  1. Mask any word in the sentence by replacing it with `[MASK]`
 
200
  gr.Markdown("""
201
  **Word positions in the target sentence:**
202
  ```
203
+ 0: This 1: model 2: create 3: relationships 4: between
204
+ 5: the 6: words 7: to 8: learn 9: what
205
+ 10: word 11: is 12: missing!
206
  ```
207
  """)
208
 
 
247
  gr.Markdown("### 💡 Example Predictions")
248
 
249
  examples = [
250
+ ["This model [MASK] relationships between the words to learn what word is missing!", 2],
251
+ ["This model create [MASK] between the words to learn what word is missing!", 3],
252
+ ["This model create relationships [MASK] the words to learn what word is missing!", 4],
253
+ ["This model create relationships between the words to [MASK] what word is missing!", 8],
254
  ]
255
 
256
  gr.Examples(