{
  "model_name": "Turnlet BERT Multilingual EOU",
  "model_type": "DistilBERT",
  "task": "text-classification",
  "languages": ["en", "hi", "es"],
  "tags": [
    "end-of-utterance",
    "eou-detection",
    "multilingual",
    "distilbert",
    "onnx",
    "quantized",
    "conversational-ai",
    "dialogue",
    "turn-taking"
  ],
  "license": "apache-2.0",
  "datasets": ["turns-2k"],
  "metrics": {
    "validation": {
      "overall_accuracy": 0.9643,
      "en_accuracy": 0.9701,
      "hi_accuracy": 0.9689,
      "es_accuracy": 0.9452,
      "f1_score": 0.9635,
      "precision": 0.9491,
      "recall": 0.9783
    },
    "turns2k": {
      "accuracy": 0.9110,
      "f1_score": 0.9150,
      "precision": 0.9796,
      "recall": 0.8584,
      "threshold": 0.86
    }
  },
  "model_variants": {
    "pytorch": {
      "file": "model.safetensors",
      "size_mb": 517,
      "format": "safetensors"
    },
    "onnx_optimized": {
      "file": "bert_model_optimized.onnx",
      "size_mb": 517,
      "format": "onnx",
      "precision": "fp32"
    },
    "onnx_quantized": {
      "file": "bert_model_optimized_dynamic_int8.onnx",
      "size_mb": 132,
      "format": "onnx",
      "precision": "int8",
      "recommended": true
    }
  },
  "training": {
    "method": "knowledge_distillation",
    "teacher_model": "qwen-based",
    "student_model": "distilbert",
    "epochs": 8,
    "final_step": 60500,
    "max_length": 128
  },
  "inference": {
    "recommended_threshold": 0.86,
    "max_sequence_length": 128,
    "batch_size_support": true
  }
}