Translation
Transformers
Safetensors
qwen3
text-generation
text-generation-inference

Add `library_name` tag and correct `language` metadata

#1
by nielsr HF Staff - opened
Files changed (1) hide show
  1. README.md +9 -7
README.md CHANGED
@@ -1,4 +1,8 @@
1
  ---
 
 
 
 
2
  language:
3
  - en
4
  - zh
@@ -24,7 +28,6 @@ language:
24
  - hu
25
  - id
26
  - ko
27
- - no
28
  - ro
29
  - sk
30
  - sv
@@ -60,15 +63,12 @@ language:
60
  - ur
61
  - uz
62
  - yue
 
63
  metrics:
64
  - bleu
65
  - comet
66
- datasets:
67
- - NiuTrans/LMT-60-sft-data
68
- base_model:
69
- - NiuTrans/LMT-60-0.6B-Base
70
- license: apache-2.0
71
  pipeline_tag: translation
 
72
  ---
73
 
74
  ## LMT
@@ -100,7 +100,9 @@ model_name = "NiuTrans/LMT-60-8B"
100
  tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side='left')
101
  model = AutoModelForCausalLM.from_pretrained(model_name)
102
 
103
- prompt = "Translate the following text from English into Chinese.\nEnglish: The concept came from China where plum blossoms were the flower of choice.\nChinese: "
 
 
104
  messages = [{"role": "user", "content": prompt}]
105
  text = tokenizer.apply_chat_template(
106
  messages,
 
1
  ---
2
+ base_model:
3
+ - NiuTrans/LMT-60-0.6B-Base
4
+ datasets:
5
+ - NiuTrans/LMT-60-sft-data
6
  language:
7
  - en
8
  - zh
 
28
  - hu
29
  - id
30
  - ko
 
31
  - ro
32
  - sk
33
  - sv
 
63
  - ur
64
  - uz
65
  - yue
66
+ license: apache-2.0
67
  metrics:
68
  - bleu
69
  - comet
 
 
 
 
 
70
  pipeline_tag: translation
71
+ library_name: transformers
72
  ---
73
 
74
  ## LMT
 
100
  tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side='left')
101
  model = AutoModelForCausalLM.from_pretrained(model_name)
102
 
103
+ prompt = "Translate the following text from English into Chinese.\nEnglish: The concept came from China where plum blossoms were the flower of choice.\nChinese: "
106
  messages = [{"role": "user", "content": prompt}]
107
  text = tokenizer.apply_chat_template(
108
  messages,