offiongbassey committed
Commit e867843 (verified)
Parent: 1b0ea56

Minor Changes.

Files changed (1): README.md (+2 −2)
README.md CHANGED
@@ -19,13 +19,13 @@ tokenizer = AutoTokenizer.from_pretrained("offiongbassey/efik-mt")
 model = AutoModelForSeq2SeqLM.from_pretrained("offiongbassey/efik-mt")
 
 # English → Efik
-text = "Jesus is God."
+text = "My child is very sick and I need to take him to the hospital for treatment."
 inputs = tokenizer(f"eng_Latn {text}", return_tensors="pt")
 outputs = model.generate(**inputs, max_length=128)
 print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 
 # Efik → English
-text = "Ka ke tietie."
+text = "Okon ama adaha utom tọñọ usenubọk."
 inputs = tokenizer(f"ibo_Latn {text}", return_tensors="pt")
 outputs = model.generate(**inputs, max_length=128)
 print(tokenizer.decode(outputs[0], skip_special_tokens=True))
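
For quick verification, here is a minimal, self-contained sketch of the README usage after this commit. It assumes the transformers library is installed; the translate() helper is a hypothetical wrapper added for illustration, not part of the model card, and the example sentences and source-language tags (including ibo_Latn on the Efik side) are copied from the diff unchanged.

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("offiongbassey/efik-mt")
model = AutoModelForSeq2SeqLM.from_pretrained("offiongbassey/efik-mt")

def translate(text: str, src_tag: str) -> str:
    # The README prepends a source-language tag to the raw input text.
    inputs = tokenizer(f"{src_tag} {text}", return_tensors="pt")
    outputs = model.generate(**inputs, max_length=128)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# English → Efik, with the new example sentence from this commit.
print(translate(
    "My child is very sick and I need to take him to the hospital for treatment.",
    "eng_Latn",
))

# Efik → English, with the new example sentence from this commit.
# Note: ibo_Latn is used verbatim as in the README for Efik-side input.
print(translate("Okon ama adaha utom tọñọ usenubọk.", "ibo_Latn"))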