{
"Huggy": {
"checkpoints": [
{
"steps": 199793,
"file_path": "results/Huggy/Huggy/Huggy-199793.onnx",
"reward": 3.826720997398975,
"creation_time": 1699600756.9610212,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199793.pt"
]
},
{
"steps": 399620,
"file_path": "results/Huggy/Huggy/Huggy-399620.onnx",
"reward": 3.453649189074834,
"creation_time": 1699600987.3760552,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399620.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
"reward": 3.9556915283203127,
"creation_time": 1699601220.293754,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799994,
"file_path": "results/Huggy/Huggy/Huggy-799994.onnx",
"reward": 3.863225690564331,
"creation_time": 1699601451.1357698,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799994.pt"
]
},
{
"steps": 999987,
"file_path": "results/Huggy/Huggy/Huggy-999987.onnx",
"reward": 3.7605467001339057,
"creation_time": 1699601689.8489196,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999987.pt"
]
},
{
"steps": 1199923,
"file_path": "results/Huggy/Huggy/Huggy-1199923.onnx",
"reward": 3.732977070926148,
"creation_time": 1699601927.588612,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199923.pt"
]
},
{
"steps": 1399967,
"file_path": "results/Huggy/Huggy/Huggy-1399967.onnx",
"reward": 3.97332709416365,
"creation_time": 1699602162.2462246,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399967.pt"
]
},
{
"steps": 1599917,
"file_path": "results/Huggy/Huggy/Huggy-1599917.onnx",
"reward": 3.9042525923735387,
"creation_time": 1699602399.1362724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599917.pt"
]
},
{
"steps": 1799987,
"file_path": "results/Huggy/Huggy/Huggy-1799987.onnx",
"reward": 3.9021508606468758,
"creation_time": 1699602635.6474235,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799987.pt"
]
},
{
"steps": 1999945,
"file_path": "results/Huggy/Huggy/Huggy-1999945.onnx",
"reward": 3.4030708596110344,
"creation_time": 1699602873.6847825,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999945.pt"
]
},
{
"steps": 2000083,
"file_path": "results/Huggy/Huggy/Huggy-2000083.onnx",
"reward": 3.5638194855521705,
"creation_time": 1699602873.7715542,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
],
"final_checkpoint": {
"steps": 2000083,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5638194855521705,
"creation_time": 1699602873.7715542,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}