{
"Huggy": {
"checkpoints": [
{
"steps": 199983,
"file_path": "results/Huggy2/Huggy/Huggy-199983.onnx",
"reward": 3.487536111185628,
"creation_time": 1754498564.127911,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199983.pt"
]
},
{
"steps": 399982,
"file_path": "results/Huggy2/Huggy/Huggy-399982.onnx",
"reward": 3.7305156495422125,
"creation_time": 1754498795.8739219,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399982.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy2/Huggy/Huggy-599938.onnx",
"reward": 3.3811470453555765,
"creation_time": 1754499029.0434253,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799920,
"file_path": "results/Huggy2/Huggy/Huggy-799920.onnx",
"reward": 3.7182161601798795,
"creation_time": 1754499259.1197937,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799920.pt"
]
},
{
"steps": 999928,
"file_path": "results/Huggy2/Huggy/Huggy-999928.onnx",
"reward": 3.8979571939360165,
"creation_time": 1754499497.1002662,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999928.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy2/Huggy/Huggy-1199999.onnx",
"reward": 3.9536127154506855,
"creation_time": 1754499739.3370306,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399762,
"file_path": "results/Huggy2/Huggy/Huggy-1399762.onnx",
"reward": 3.9958297411600747,
"creation_time": 1754499978.4431624,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399762.pt"
]
},
{
"steps": 1599957,
"file_path": "results/Huggy2/Huggy/Huggy-1599957.onnx",
"reward": 3.8015442782518813,
"creation_time": 1754500214.871363,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599957.pt"
]
},
{
"steps": 1799992,
"file_path": "results/Huggy2/Huggy/Huggy-1799992.onnx",
"reward": 3.8497700432936353,
"creation_time": 1754500452.4601753,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799992.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy2/Huggy/Huggy-1999987.onnx",
"reward": 3.8936260133981704,
"creation_time": 1754500693.0240426,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000094,
"file_path": "results/Huggy2/Huggy/Huggy-2000094.onnx",
"reward": 3.9127375585276907,
"creation_time": 1754500693.1350603,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000094.pt"
]
}
],
"final_checkpoint": {
"steps": 2000094,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.9127375585276907,
"creation_time": 1754500693.1350603,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000094.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}