# Hugging Face Space: captcha solver demo (Gradio UI over an ONNX text-recognition model).
import sys
import huggingface_hub
# Compatibility shim: some huggingface_hub releases removed HfFolder, but a
# downstream import (presumably gradio or a transitive dependency) still
# references it — TODO confirm which consumer needs this. The patch must run
# BEFORE the imports below, so keep this block ahead of them.
if not hasattr(huggingface_hub, "HfFolder"):
    class MockHfFolder:
        # Minimal stand-in exposing only the attribute callers use here.
        @staticmethod
        def get_token(): return None
    huggingface_hub.HfFolder = MockHfFolder
import torch
import onnxruntime as rt
from torchvision import transforms as T
from PIL import Image
from tokenizer_base import Tokenizer  # project-local module; decodes model output
import pathlib
import os
import gradio as gr
# Path configuration
cwd = pathlib.Path(__file__).parent.resolve()  # directory containing this script
model_file = os.path.join(cwd, "secret_models", "captcha.onnx")  # exported ONNX captcha model
img_size = (32, 128)  # assumed (height, width) expected by the model — TODO confirm against export
# Full character set the model was trained on; order must match training — do not reorder.
charset = r"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
tokenizer_base = Tokenizer(charset)  # decodes per-step probabilities into strings
def get_transform(img_size):
    """Build the inference preprocessing pipeline.

    Resizes to ``img_size`` with bicubic interpolation, converts the PIL
    image to a tensor, then normalizes values into [-1, 1].
    """
    steps = [
        T.Resize(img_size, interpolation=T.InterpolationMode.BICUBIC),
        T.ToTensor(),
        T.Normalize(mean=0.5, std=0.5),
    ]
    return T.Compose(steps)
# Build the preprocessing pipeline and load the ONNX session once at import
# time, so every predict() call reuses them.
transform = get_transform(img_size)
ort_session = rt.InferenceSession(model_file)
def predict(img):
    """Solve a captcha image.

    Args:
        img: PIL image from the Gradio widget, or None when nothing was uploaded.

    Returns:
        The decoded captcha string, or an ``"Error: ..."`` message — errors are
        reported as text so the UI never crashes.
    """
    try:
        # Guard clause: the Gradio Image component yields None when empty.
        if img is None:
            return "Error: No hay imagen"
        batch = transform(img.convert('RGB')).unsqueeze(0)  # add batch dim
        input_name = ort_session.get_inputs()[0].name
        feed = {input_name: batch.detach().cpu().numpy()}
        raw_logits = ort_session.run(None, feed)[0]
        # Back to torch so softmax/decode match the tokenizer's expectations.
        char_probs = torch.tensor(raw_logits).softmax(-1)
        decoded, _ = tokenizer_base.decode(char_probs)
        return decoded[0]
    except Exception as e:
        return f"Error: {str(e)}"
# Simplified interface with Blocks.
with gr.Blocks() as demo:
    gr.Markdown("### API Captcha Solver")
    input_img = gr.Image(type="pil")  # PIL image handed straight to predict()
    output_text = gr.Textbox()
    btn = gr.Button("Resolver")
    btn.click(fn=predict, inputs=input_img, outputs=output_text)

if __name__ == "__main__":
    # Bind to all interfaces on the conventional Spaces port; hide the API page.
    # Fix: removed the stray trailing "|" that made this line a syntax error.
    demo.launch(server_name="0.0.0.0", server_port=7860, show_api=False)