{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199614,
        "file_path": "results/Huggy2/Huggy/Huggy-199614.onnx",
        "reward": 3.491738099543775,
        "creation_time": 1746896407.3855953,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199614.pt"
        ]
      },
      {
        "steps": 399958,
        "file_path": "results/Huggy2/Huggy/Huggy-399958.onnx",
        "reward": 3.9106246512383223,
        "creation_time": 1746896659.3199399,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399958.pt"
        ]
      },
      {
        "steps": 599868,
        "file_path": "results/Huggy2/Huggy/Huggy-599868.onnx",
        "reward": 4.219340689720646,
        "creation_time": 1746896917.977607,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599868.pt"
        ]
      },
      {
        "steps": 799951,
        "file_path": "results/Huggy2/Huggy/Huggy-799951.onnx",
        "reward": 3.817439305900347,
        "creation_time": 1746897182.489629,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799951.pt"
        ]
      },
      {
        "steps": 999952,
        "file_path": "results/Huggy2/Huggy/Huggy-999952.onnx",
        "reward": 3.774888351015801,
        "creation_time": 1746897446.8492837,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999952.pt"
        ]
      },
      {
        "steps": 1199991,
        "file_path": "results/Huggy2/Huggy/Huggy-1199991.onnx",
        "reward": 3.4847265423080067,
        "creation_time": 1746897715.1702654,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199991.pt"
        ]
      },
      {
        "steps": 1399961,
        "file_path": "results/Huggy2/Huggy/Huggy-1399961.onnx",
        "reward": 4.036517433822155,
        "creation_time": 1746897983.4824073,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399961.pt"
        ]
      },
      {
        "steps": 1599288,
        "file_path": "results/Huggy2/Huggy/Huggy-1599288.onnx",
        "reward": 3.7150960971290865,
        "creation_time": 1746898244.760937,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599288.pt"
        ]
      },
      {
        "steps": 1799888,
        "file_path": "results/Huggy2/Huggy/Huggy-1799888.onnx",
        "reward": 3.758997958125049,
        "creation_time": 1746898512.566007,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799888.pt"
        ]
      },
      {
        "steps": 1999925,
        "file_path": "results/Huggy2/Huggy/Huggy-1999925.onnx",
        "reward": 3.824669151079087,
        "creation_time": 1746898777.8152852,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999925.pt"
        ]
      },
      {
        "steps": 2000002,
        "file_path": "results/Huggy2/Huggy/Huggy-2000002.onnx",
        "reward": 3.850269155576825,
        "creation_time": 1746898777.9372985,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000002.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000002,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.850269155576825,
      "creation_time": 1746898777.9372985,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000002.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.7.0+cu126"
  }
}