{
"Huggy": {
"checkpoints": [
{
"steps": 199749,
"file_path": "results/Huggy2/Huggy/Huggy-199749.onnx",
"reward": 3.3914928090187813,
"creation_time": 1745564282.0989547,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199749.pt"
]
},
{
"steps": 399842,
"file_path": "results/Huggy2/Huggy/Huggy-399842.onnx",
"reward": 3.8241215452315314,
"creation_time": 1745564523.6341527,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399842.pt"
]
},
{
"steps": 599915,
"file_path": "results/Huggy2/Huggy/Huggy-599915.onnx",
"reward": 2.952579608985356,
"creation_time": 1745564772.5405674,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599915.pt"
]
},
{
"steps": 799989,
"file_path": "results/Huggy2/Huggy/Huggy-799989.onnx",
"reward": 3.6234290570388605,
"creation_time": 1745565015.846088,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799989.pt"
]
},
{
"steps": 999872,
"file_path": "results/Huggy2/Huggy/Huggy-999872.onnx",
"reward": 3.8577577802870007,
"creation_time": 1745565266.17745,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999872.pt"
]
},
{
"steps": 1199622,
"file_path": "results/Huggy2/Huggy/Huggy-1199622.onnx",
"reward": 4.062610967954,
"creation_time": 1745565514.6229172,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199622.pt"
]
},
{
"steps": 1399951,
"file_path": "results/Huggy2/Huggy/Huggy-1399951.onnx",
"reward": 3.7719869295974355,
"creation_time": 1745565759.7632627,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399951.pt"
]
},
{
"steps": 1599949,
"file_path": "results/Huggy2/Huggy/Huggy-1599949.onnx",
"reward": 3.702286161695208,
"creation_time": 1745566008.8152733,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599949.pt"
]
},
{
"steps": 1799933,
"file_path": "results/Huggy2/Huggy/Huggy-1799933.onnx",
"reward": 3.870544500038272,
"creation_time": 1745566257.199284,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799933.pt"
]
},
{
"steps": 1999908,
"file_path": "results/Huggy2/Huggy/Huggy-1999908.onnx",
"reward": 3.6513583091802375,
"creation_time": 1745566501.1755106,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999908.pt"
]
},
{
"steps": 2000012,
"file_path": "results/Huggy2/Huggy/Huggy-2000012.onnx",
"reward": 3.65543290613978,
"creation_time": 1745566501.334506,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000012.pt"
]
}
],
"final_checkpoint": {
"steps": 2000012,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.65543290613978,
"creation_time": 1745566501.334506,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000012.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}