# Hugging Face Spaces demo: HY-MT1.5-1.8B translator (Gradio app)
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Model ID
model_id = "tencent/HY-MT1.5-1.8B"

# Pick device and precision to match the runtime environment:
# float32 on CPU (free Spaces tier), float16 when a GPU is available.
if torch.cuda.is_available():
    device = "cuda"
    dtype = torch.float16
else:
    device = "cpu"
    dtype = torch.float32

print(f"Loading model on {device} with {dtype}...")

# Load the tokenizer and model once at module import (downloads on first run).
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map=device,  # explicit device rather than "auto"
    torch_dtype=dtype,
)
def translate_text(source_text, target_lang):
    """Translate ``source_text`` into ``target_lang`` with the HY-MT model.

    Parameters
    ----------
    source_text : str
        Text to translate.
    target_lang : str
        Target language name (e.g. "English", "Chinese").

    Returns
    -------
    str
        The model's translation with special tokens stripped; empty string
        for empty/whitespace-only input.
    """
    # Robustness: skip the model call entirely when there is nothing to translate.
    if not source_text or not source_text.strip():
        return ""

    # Prompt switching: use a Chinese instruction when the target is Chinese,
    # an English instruction otherwise.
    if "Chinese" in target_lang or "中文" in target_lang:
        prompt = f"将以下文本翻译为{target_lang},注意只需要输出翻译后的结果,不要额外解释:\n{source_text}"
    else:
        prompt = f"Translate the following segment into {target_lang}, without additional explanation.\n{source_text}"

    messages = [{"role": "user", "content": prompt}]

    # Tokenize via the model's chat template.
    # NOTE(review): add_generation_prompt=False mirrors the upstream
    # Hunyuan-MT usage example — confirm against the model card.
    text_input = tokenizer.apply_chat_template(
        messages,
        tokenize=True,
        add_generation_prompt=False,
        return_tensors="pt",
    ).to(device)

    # Inference only — no gradients needed.
    with torch.no_grad():
        generated_ids = model.generate(
            text_input,
            max_new_tokens=1024,
            temperature=0.7,
            top_p=0.6,
            repetition_penalty=1.05,
        )

    # Keep only the newly generated tokens (drop the echoed prompt).
    input_length = text_input.shape[1]
    response = generated_ids[0][input_length:]
    return tokenizer.decode(response, skip_special_tokens=True)
# Build the UI.
langs = ["Japanese", "English", "Chinese", "Korean", "French", "German", "Spanish"]

with gr.Blocks() as demo:
    gr.Markdown("# 🚀 HY-MT1.5-1.8B Translator (Spaces)")
    gr.Markdown("Tencentの1.8Bモデルを使用した翻訳デモです。")
    with gr.Row():
        with gr.Column():
            input_text = gr.Textbox(label="原文 (Source Text)", lines=5, placeholder="ここに入力...")
            target_lang = gr.Dropdown(choices=langs, value="English", label="翻訳先 (Target Language)")
            submit_btn = gr.Button("翻訳 (Translate)", variant="primary")
        with gr.Column():
            output_text = gr.Textbox(label="結果 (Result)", lines=5, interactive=False)

    # Wire the button to the translation function.
    submit_btn.click(
        fn=translate_text,
        inputs=[input_text, target_lang],
        outputs=output_text,
    )

# Launch the app.
demo.launch()