import { pipeline, DataType } from "@huggingface/transformers";
/**
 * Lazily-loaded singleton wrapper around a Transformers.js text-generation
 * pipeline used for translation (TranslateGemma ONNX build, WebGPU).
 *
 * Usage: `await Translator.getInstance().init(onProgress)` once, then call
 * `translate(text, sourceLang, targetLang)` as needed.
 */
class Translator {
  private static instance: Translator | null = null;
  // Concrete pipeline class is not exported in a stable way by
  // @huggingface/transformers, so this stays `any` deliberately.
  private pipeline: any = null;
  // In-flight init() promise; deduplicates concurrent loads.
  private initPromise: Promise<void> | null = null;
  private static modelId: string =
    "onnx-community/translategemma-text-4b-it-ONNX";
  private static dtype: DataType = "q4";
  // Total download size of all model files in bytes — used as the
  // denominator for progress reporting. NOTE(review): must be kept in
  // sync with the model revision above.
  public static size: number = 3111894696;

  private constructor() {}

  /** Returns the process-wide Translator instance (created on first call). */
  public static getInstance(): Translator {
    if (!Translator.instance) {
      Translator.instance = new Translator();
    }
    return Translator.instance;
  }

  /**
   * Downloads (if necessary) and initializes the model pipeline.
   *
   * Idempotent and concurrency-safe: resolves immediately once loaded, and
   * overlapping calls share a single in-flight load instead of fetching the
   * model twice. A failed load clears the guard so init() can be retried.
   *
   * @param onProgress Optional callback receiving download progress as a
   *   percentage (0–100, rounded to two decimal places).
   */
  public async init(onProgress?: (progress: number) => void): Promise<void> {
    if (this.pipeline) return;
    if (!this.initPromise) {
      this.initPromise = this.load(onProgress).catch((err: unknown) => {
        this.initPromise = null; // allow retry after a failed load
        throw err;
      });
    }
    return this.initPromise;
  }

  /** Builds the text-generation pipeline, reporting aggregate progress. */
  private async load(onProgress?: (progress: number) => void): Promise<void> {
    // Bytes downloaded so far, keyed by model file name.
    const loaded = new Map<string, number>();
    let lastPercent = 0;
    this.pipeline = await pipeline("text-generation", Translator.modelId, {
      progress_callback: (e) => {
        if (e.status === "progress") {
          loaded.set(e.file, e.loaded);
          const totalLoaded = Array.from(loaded.values()).reduce(
            (acc: number, curr: number) => acc + curr,
            0
          );
          // Percent of the known total size, rounded to 2 decimals.
          const percent =
            Math.round((100 / Translator.size) * totalLoaded * 100) / 100;
          if (lastPercent !== percent) {
            lastPercent = percent;
            // BUG FIX: onProgress is optional — guard before invoking
            // (the original called it unconditionally and crashed when
            // callers omitted the callback).
            onProgress?.(percent);
          }
        }
      },
      device: "webgpu",
      dtype: Translator.dtype,
    });
  }

  /**
   * Translates `text` from `sourceLang` to `targetLang`.
   *
   * @param text Source text to translate.
   * @param sourceLang Source language code (as expected by the model).
   * @param targetLang Target language code.
   * @returns The translated text.
   * @throws Error if init() has not completed, or the model returned no
   *   generated message.
   */
  public async translate(
    text: string,
    sourceLang: string,
    targetLang: string
  ): Promise<string> {
    if (!this.pipeline) {
      throw new Error("Translator not initialized. Call init() first.");
    }
    const messages = [
      {
        role: "user",
        content: [
          {
            type: "text",
            source_lang_code: sourceLang,
            target_lang_code: targetLang,
            text,
          },
        ],
      },
    ];
    const output = await this.pipeline(messages, {
      max_new_tokens: 1024,
    });
    // Read — don't pop()/mutate — the last generated message, and fail
    // with a descriptive error instead of a TypeError if it is missing.
    const last = output[0].generated_text.at(-1);
    if (last == null) {
      throw new Error("Model produced no output.");
    }
    return last.content;
  }
}
export default Translator;