{
"Huggy": {
"checkpoints": [
{
"steps": 199963,
"file_path": "results/Huggy2/Huggy/Huggy-199963.onnx",
"reward": 3.3394765267606643,
"creation_time": 1768053037.9602358,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199963.pt"
]
},
{
"steps": 399976,
"file_path": "results/Huggy2/Huggy/Huggy-399976.onnx",
"reward": 3.8544264073882784,
"creation_time": 1768053296.0281317,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399976.pt"
]
},
{
"steps": 599678,
"file_path": "results/Huggy2/Huggy/Huggy-599678.onnx",
"reward": 3.4202113492148265,
"creation_time": 1768053555.0983348,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599678.pt"
]
},
{
"steps": 799981,
"file_path": "results/Huggy2/Huggy/Huggy-799981.onnx",
"reward": 3.8313566990346715,
"creation_time": 1768053812.4807932,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799981.pt"
]
},
{
"steps": 999947,
"file_path": "results/Huggy2/Huggy/Huggy-999947.onnx",
"reward": 3.5526108284976994,
"creation_time": 1768054069.7561631,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999947.pt"
]
},
{
"steps": 1199851,
"file_path": "results/Huggy2/Huggy/Huggy-1199851.onnx",
"reward": 3.305702733366113,
"creation_time": 1768054328.186638,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199851.pt"
]
},
{
"steps": 1399646,
"file_path": "results/Huggy2/Huggy/Huggy-1399646.onnx",
"reward": 3.5256647891793516,
"creation_time": 1768054583.9233942,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399646.pt"
]
},
{
"steps": 1599974,
"file_path": "results/Huggy2/Huggy/Huggy-1599974.onnx",
"reward": 3.6165972117517815,
"creation_time": 1768054841.6212962,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599974.pt"
]
},
{
"steps": 1799982,
"file_path": "results/Huggy2/Huggy/Huggy-1799982.onnx",
"reward": 3.7560034374721716,
"creation_time": 1768055100.450611,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799982.pt"
]
},
{
"steps": 1999726,
"file_path": "results/Huggy2/Huggy/Huggy-1999726.onnx",
"reward": 3.7138798289246613,
"creation_time": 1768055353.7172632,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999726.pt"
]
},
{
"steps": 2000476,
"file_path": "results/Huggy2/Huggy/Huggy-2000476.onnx",
"reward": 3.672490000073376,
"creation_time": 1768055353.8566916,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000476.pt"
]
}
],
"final_checkpoint": {
"steps": 2000476,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.672490000073376,
"creation_time": 1768055353.8566916,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000476.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}