{
"Huggy": {
"checkpoints": [
{
"steps": 399848,
"file_path": "results/Huggy1/Huggy/Huggy-399848.onnx",
"reward": 3.633220403282731,
"creation_time": 1741311239.4816275,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-399848.pt"
]
},
{
"steps": 599290,
"file_path": "results/Huggy1/Huggy/Huggy-599290.onnx",
"reward": 4.212124884128571,
"creation_time": 1741311477.1360376,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-599290.pt"
]
},
{
"steps": 799950,
"file_path": "results/Huggy1/Huggy/Huggy-799950.onnx",
"reward": 3.902337409714435,
"creation_time": 1741311717.1735146,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-799950.pt"
]
},
{
"steps": 999998,
"file_path": "results/Huggy1/Huggy/Huggy-999998.onnx",
"reward": 4.27873916949256,
"creation_time": 1741311958.9197943,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-999998.pt"
]
},
{
"steps": 1199988,
"file_path": "results/Huggy1/Huggy/Huggy-1199988.onnx",
"reward": 3.630147418095952,
"creation_time": 1741312200.245149,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-1199988.pt"
]
},
{
"steps": 1399939,
"file_path": "results/Huggy1/Huggy/Huggy-1399939.onnx",
"reward": 3.2124393939971925,
"creation_time": 1741312445.2731678,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-1399939.pt"
]
},
{
"steps": 1599974,
"file_path": "results/Huggy1/Huggy/Huggy-1599974.onnx",
"reward": 3.8273920838278954,
"creation_time": 1741312685.0249782,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-1599974.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy1/Huggy/Huggy-1799988.onnx",
"reward": 4.006733905555856,
"creation_time": 1741312931.5004559,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy1/Huggy/Huggy-1999992.onnx",
"reward": 3.3959962656864753,
"creation_time": 1741313174.5810611,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000089,
"file_path": "results/Huggy1/Huggy/Huggy-2000089.onnx",
"reward": 3.444523467207855,
"creation_time": 1741313174.69802,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-2000089.pt"
]
}
],
"final_checkpoint": {
"steps": 2000089,
"file_path": "results/Huggy1/Huggy.onnx",
"reward": 3.444523467207855,
"creation_time": 1741313174.69802,
"auxillary_file_paths": [
"results/Huggy1/Huggy/Huggy-2000089.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}