| import gradio as gr | |
| from transformers import pipeline | |
| import json | |
| from datetime import datetime | |
| import os | |
# Path of the JSON-lines interaction log served by the "Download Log" button.
LOG_FILE = "/tmp/log.jsonl"

# Touch the file so a download works even before the first prediction is
# logged. open(..., "a") creates the file if missing WITHOUT truncating
# existing content, which fixes the check-then-create race (and potential
# truncation) of the original os.path.exists() + open(..., "w") pattern.
with open(LOG_FILE, "a"):
    pass
| model = pipeline("token-classification", model="benchaffe/Bert-RAdam-Large", aggregation_strategy="simple") | |
def to_serializable(obj):
    """Recursively convert *obj* into JSON-serializable primitives.

    dicts are walked value-by-value; lists, tuples and sets become JSON
    arrays; primitives pass through unchanged; anything else (e.g. numpy
    scalars in pipeline output) falls back to its str() representation.

    Args:
        obj: arbitrary (possibly nested) object.

    Returns:
        A structure built only of dict, list, str, int, float, bool, None.
    """
    if isinstance(obj, (float, int, str, bool, type(None))):
        return obj
    if isinstance(obj, dict):
        return {k: to_serializable(v) for k, v in obj.items()}
    # Generalized: tuples and sets are serialized as JSON arrays rather than
    # being collapsed to their str() repr as the original code did.
    if isinstance(obj, (list, tuple, set)):
        return [to_serializable(i) for i in obj]
    return str(obj)
def log_interaction(input_text, prediction):
    """Append one prediction record to LOG_FILE as a single JSON line.

    Args:
        input_text: raw text the user submitted.
        prediction: pipeline output; passed through to_serializable() so
            non-JSON types (e.g. numpy scalars) don't break json.dumps.
    """
    log_entry = {
        # Fix: timezone-aware local timestamp (includes UTC offset) instead
        # of a naive datetime.now() -- unambiguous when logs are compared
        # across hosts. Uses only the already-imported datetime module.
        "timestamp": datetime.now().astimezone().isoformat(),
        "input": input_text,
        "prediction": to_serializable(prediction),
    }
    # Append mode also creates the file if missing, so logging still works
    # even if the startup touch did not run.
    with open(LOG_FILE, "a") as f:
        f.write(json.dumps(log_entry) + "\n")
def predict_and_log(text):
    """Run the token-classification pipeline on *text*, persist the
    interaction to the JSONL log, and return the raw prediction."""
    prediction = model(text)
    log_interaction(text, prediction)
    return prediction
def get_log_file():
    """Return the log file's path so gr.File can offer it for download."""
    return LOG_FILE
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("## Biomedical Abbreviation Identifier")

    # Input / output widgets.
    text_input = gr.Textbox(label="Enter biomedical text")
    prediction_view = gr.JSON(label="Model Prediction")
    log_download = gr.File(label="Click to download")

    # Action buttons.
    run_button = gr.Button("Submit")
    log_button = gr.Button("Download Log")

    # Wire events: Submit runs the model and shows JSON; Download Log
    # hands the JSONL file path to the File component.
    run_button.click(fn=predict_and_log, inputs=text_input, outputs=prediction_view)
    log_button.click(fn=get_log_file, outputs=log_download)

demo.launch()