SurvHTE-Bench / actg_syn / setups / actg_syn__Scenario_A__meta.json
snoroozi's picture
Upload folder using huggingface_hub
4d594b3 verified
raw
history blame
1.36 kB
{
  "summary": {
    "censoring_rate": 0.511921458625526,
    "treatment_rate": 0.5614773258532024,
    "event_time_min": 0.6536759401597113,
    "event_time_25pct": 25.493062254005324,
    "event_time_median": 33.84489207370351,
    "event_time_75pct": 43.290868915683404,
    "event_time_max": 95.25508215241828,
    "event_time_mean": 34.60805023757897,
    "event_time_std": 14.158181382353972,
    "censoring_time_min": 23.04080751896593,
    "censoring_time_median": 33.333493284505394,
    "censoring_time_max": 44.86151373159986,
    "censoring_time_mean": 33.3661146047237,
    "censoring_time_std": 3.3184348303759745,
    "ate": 5.044362088358642,
    "cate_min": -41.1870648720257,
    "cate_median": 4.957602854450798,
    "cate_max": 43.93721264162399,
    "ate_med_horizon": 2.51372147496,
    "cate_min_med_horizon": -31.0261803568309,
    "cate_median_med_horizon": 0.0,
    "cate_max_med_horizon": 32.36940731114939,
    "ate_p_surv_t25": 0.14417690594171115,
    "cate_min_p_surv_t25": -0.5565087480912062,
    "cate_median_p_surv_t25": 0.07341298480516889,
    "cate_max_p_surv_t25": 0.8024038021455714,
    "ate_p_surv_t50": 0.16806565363365902,
    "cate_min_p_surv_t50": -0.48653855389675515,
    "cate_median_p_surv_t50": 0.10197578389379369,
    "cate_max_p_surv_t50": 0.847454657327547,
    "ate_p_surv_t75": 0.0976654973298137,
    "cate_min_p_surv_t75": -0.6925592075231858,
    "cate_median_p_surv_t75": 0.021723201253806397,
    "cate_max_p_surv_t75": 0.8130819373869662
  }
}