EfektMotyla committed on
Commit
87cce36
·
verified ·
1 Parent(s): 9de93b0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -7
app.py CHANGED
@@ -18,7 +18,6 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
18
 
19
  ROOT = Path(__file__).parent
20
 
21
- # Uwaga: NIE ma katalogu models/
22
  aspect_dir = ROOT / "bert-aspect-ner"
23
  sentiment_dir = ROOT / "absa-roberta"
24
  # ────────────────────── modele lokalne ─────────────────────
@@ -39,19 +38,19 @@ sentiment_model = AutoModelForSequenceClassification.from_pretrained(
39
  # ────────────────────── modele tłumaczeń (on-line) ─────────
40
 
41
  model_name = "Helsinki-NLP/opus-mt-pl-en"
42
- pl_to_en_tokenizer = MarianTokenizer.from_pretrained(model_name)
43
- pl_to_en_model = MarianMTModel.from_pretrained(model_name).to(device)
44
 
 
 
 
 
45
def translate_pl_to_en(texts):
    """Translate a batch of Polish texts into English.

    Tokenizes *texts* as one padded/truncated batch, runs the PL→EN Marian
    model without gradient tracking, and decodes the generated token ids
    back into plain strings (special tokens stripped).
    """
    batch = pl_to_en_tokenizer(
        texts, return_tensors="pt", padding=True, truncation=True
    ).to(device)
    with torch.no_grad():
        output_ids = pl_to_en_model.generate(**batch)
    return pl_to_en_tokenizer.batch_decode(output_ids, skip_special_tokens=True)
50
 
51
- en_to_pl_model_name = "gsarti/opus-mt-tc-en-pl"
52
- en_to_pl_tokenizer = MarianTokenizer.from_pretrained(en_to_pl_model_name)
53
- en_to_pl_model = MarianMTModel.from_pretrained(en_to_pl_model_name).to(device)
54
-
55
  def translate_en_to_pl(texts):
56
  inputs = en_to_pl_tokenizer(texts, return_tensors="pt", padding=True, truncation=True).to(device)
57
  with torch.no_grad():
 
18
 
19
  ROOT = Path(__file__).parent
20
 
 
21
  aspect_dir = ROOT / "bert-aspect-ner"
22
  sentiment_dir = ROOT / "absa-roberta"
23
  # ────────────────────── modele lokalne ─────────────────────
 
38
  # ────────────────────── modele tłumaczeń (on-line) ─────────
39
 
40
  model_name = "Helsinki-NLP/opus-mt-pl-en"
41
+ pl_to_en_tokenizer = MarianTokenizer.from_pretrained("translation-pl-en")
42
+ pl_to_en_model = MarianMTModel.from_pretrained("translation-pl-en").to(device)
43
 
44
+ en_to_pl_tokenizer = MarianTokenizer.from_pretrained("translation-en-pl")
45
+ en_to_pl_model = MarianMTModel.from_pretrained("translation-en-pl").to(device)
46
+
47
+ # 🔁 Funkcje tłumaczeń
48
def translate_pl_to_en(texts):
    """Return English translations of the given Polish texts.

    The whole list is encoded as a single padded batch; generation runs
    under ``torch.no_grad()`` since this is pure inference.
    """
    encoded = pl_to_en_tokenizer(texts, return_tensors="pt", padding=True, truncation=True)
    encoded = encoded.to(device)
    with torch.no_grad():
        generated = pl_to_en_model.generate(**encoded)
    decoded = pl_to_en_tokenizer.batch_decode(generated, skip_special_tokens=True)
    return decoded
53
 
 
 
 
 
54
  def translate_en_to_pl(texts):
55
  inputs = en_to_pl_tokenizer(texts, return_tensors="pt", padding=True, truncation=True).to(device)
56
  with torch.no_grad():