{
"auto_tune": true,
"codebase_root": ".",
"corpus_source": "codebase",
"generated_at": "2026-03-06T15:45:57.456781+00:00",
"include_oracle": false,
"max_constraints": 6,
"methods": {
"instructed": {
"avg_f1": 0.4627481121534755,
"avg_precision": 1.0,
"avg_recall": 0.3849464137584758,
"exact_match_rate": 0.23333333333333334,
"queries": 60.0
},
"naive": {
"avg_f1": 0.42538759196322323,
"avg_precision": 0.5160696692628963,
"avg_recall": 0.6566925088632642,
"exact_match_rate": 0.11666666666666667,
"queries": 60.0
},
"verified_consensus": {
"avg_f1": 0.5026392863678226,
"avg_precision": 1.0,
"avg_recall": 0.4371641699325427,
"exact_match_rate": 0.31666666666666665,
"queries": 60.0
},
"verified_structural": {
"avg_f1": 0.4627481121534755,
"avg_precision": 1.0,
"avg_recall": 0.3849464137584758,
"exact_match_rate": 0.23333333333333334,
"queries": 60.0
},
"verified_structural_ensemble": {
"avg_f1": 0.5267705644750732,
"avg_precision": 1.0,
"avg_recall": 0.45904681770501754,
"exact_match_rate": 0.3333333333333333,
"queries": 60.0
}
},
"min_constraints": 3,
"n_docs": 1500,
"n_queries_eval": 60,
"n_queries_generated": 300,
"n_queries_requested": 300,
"n_queries_tune": 240,
"retrieval_config": {
"ensemble_top_k": 2,
"ensemble_vote_threshold": 0.4,
"final_match_ratio": 0.65,
"min_match_ratio": 0.7,
"min_step_k": 20,
"noise_weight": 0.01,
"step_k_ratio": 0.06
},
"seed": 1337,
"tune_target": "verified_structural_ensemble"
}