# Hugging Face Space: "Infinity Large Khmer OCR" demo app
# (Space status header from the page scrape removed — it is not Python code.)
import importlib.util
import os
import sys

import gradio as gr
from huggingface_hub import hf_hub_download
# Private HF repo that hosts both the model weights and the inference code.
infinity_repo_2026 = "SoyVitou/infinity-khmer-ocr-large"
# Name of the inference script to fetch from that repo.
infinity_model = "model.py"

# Example images shown under the Gradio input widget:
# ./image.png plus ./image1.png … ./image4.png, one per row.
EXAMPLES = [["./image.png"]] + [[f"./image{n}.png"] for n in range(1, 5)]
def import_private_module(repo_id: str, filename: str, token: str):
    """Download one .py file from a (possibly private) HF repo and import it.

    Parameters
    ----------
    repo_id : Hugging Face repository id, e.g. "user/repo".
    filename : path of the Python file inside the repo.
    token : HF access token with read permission on the repo.

    Returns
    -------
    The freshly imported module object.

    Raises
    ------
    ImportError
        If an import spec cannot be built for the downloaded file.
    """
    path = hf_hub_download(repo_id=repo_id, filename=filename, token=token)
    spec = importlib.util.spec_from_file_location("private_model", path)
    # spec_from_file_location returns None for unloadable paths; fail loudly
    # here instead of crashing later with an opaque AttributeError on `spec`.
    if spec is None or spec.loader is None:
        raise ImportError(f"Cannot build an import spec for {path!r}")
    mod = importlib.util.module_from_spec(spec)
    # Register the module before executing it, per the importlib recipe, so
    # code inside it that resolves itself via sys.modules works correctly.
    sys.modules[spec.name] = mod
    spec.loader.exec_module(mod)
    return mod
# The Space injects HF_TOKEN as a secret; refuse to start without it because
# the model repo is private and every download below needs the token.
HF_TOKEN = os.getenv("HF_TOKEN", "").strip()
if not HF_TOKEN:
    raise RuntimeError("HF_TOKEN not found.")

# Fetch the private inference code, then let it load model, tokenizer,
# config, and target device from the same private repo.
pm = import_private_module(infinity_repo_2026, infinity_model, HF_TOKEN)
MODEL, TOK, CFG_OBJ, DEVICE = pm.load_from_private_repo(hf_hub_download, HF_TOKEN)
def predict(img):
    """Run OCR on a single uploaded PIL image and return the decoded text."""
    text = pm.predict_one(MODEL, TOK, CFG_OBJ, DEVICE, img)
    return text
# Minimal image-in / text-out UI around the private OCR model.
_ui_config = {
    "fn": predict,
    "inputs": gr.Image(type="pil", label="Upload image"),
    "outputs": gr.Textbox(label="OCR result", lines=6),
    "title": "Infinity Large Khmer OCR",
    "examples": EXAMPLES,
    # Do not pre-run the examples at startup; each one hits the model lazily.
    "cache_examples": False,
}
demo = gr.Interface(**_ui_config)

if __name__ == "__main__":
    demo.launch()