ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199706,
"file_path": "results/Huggy/Huggy/Huggy-199706.onnx",
"reward": 3.2291786755834306,
"creation_time": 1700069539.671613,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199706.pt"
]
},
{
"steps": 399683,
"file_path": "results/Huggy/Huggy/Huggy-399683.onnx",
"reward": 3.9173193862361293,
"creation_time": 1700069795.24524,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399683.pt"
]
},
{
"steps": 599992,
"file_path": "results/Huggy/Huggy/Huggy-599992.onnx",
"reward": 3.719147040293767,
"creation_time": 1700070045.8335798,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599992.pt"
]
},
{
"steps": 799952,
"file_path": "results/Huggy/Huggy/Huggy-799952.onnx",
"reward": 3.4674018112953098,
"creation_time": 1700070290.9506357,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799952.pt"
]
},
{
"steps": 999870,
"file_path": "results/Huggy/Huggy/Huggy-999870.onnx",
"reward": 3.9047887102375185,
"creation_time": 1700070542.0217779,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999870.pt"
]
},
{
"steps": 1199942,
"file_path": "results/Huggy/Huggy/Huggy-1199942.onnx",
"reward": 3.831996016204357,
"creation_time": 1700070782.9053512,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199942.pt"
]
},
{
"steps": 1399878,
"file_path": "results/Huggy/Huggy/Huggy-1399878.onnx",
"reward": 4.0801990807056425,
"creation_time": 1700071021.9750972,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399878.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
"reward": 3.9515313022940823,
"creation_time": 1700071259.7870824,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
"reward": 3.899457685967796,
"creation_time": 1700071509.7339728,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999941,
"file_path": "results/Huggy/Huggy/Huggy-1999941.onnx",
"reward": 3.861564307501822,
"creation_time": 1700071756.8991182,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999941.pt"
]
},
{
"steps": 2000073,
"file_path": "results/Huggy/Huggy/Huggy-2000073.onnx",
"reward": 3.9473987642456505,
"creation_time": 1700071757.002195,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000073.pt"
]
}
],
"final_checkpoint": {
"steps": 2000073,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9473987642456505,
"creation_time": 1700071757.002195,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000073.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}
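
For reference, a minimal sketch of how a script might consume this run log, e.g. to select the checkpoint whose logged reward is highest. This is an illustration, not part of the upload; the relative path is an assumption based on the default ML-Agents results layout, and the brain name "Huggy" is taken from the file above.

import json

# Load the ML-Agents run log (path assumed; adjust to where the file lives).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each checkpoint entry records the step count, the exported .onnx path,
# the reward logged at export time, and the matching .pt auxiliary file.
checkpoints = status["Huggy"]["checkpoints"]
best = max(checkpoints, key=lambda c: c["reward"])
print(f"Best checkpoint: {best['file_path']} "
      f"(reward {best['reward']:.3f} at step {best['steps']})")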