{
"Huggy": {
"checkpoints": [
{
"steps": 199834,
"file_path": "results/Huggy2/Huggy/Huggy-199834.onnx",
"reward": 3.3974214479571483,
"creation_time": 1713963826.4612253,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199834.pt"
]
},
{
"steps": 399814,
"file_path": "results/Huggy2/Huggy/Huggy-399814.onnx",
"reward": 3.3299718610942364,
"creation_time": 1713964073.5439467,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399814.pt"
]
},
{
"steps": 599827,
"file_path": "results/Huggy2/Huggy/Huggy-599827.onnx",
"reward": 3.9656515568494797,
"creation_time": 1713964317.9150352,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599827.pt"
]
},
{
"steps": 799993,
"file_path": "results/Huggy2/Huggy/Huggy-799993.onnx",
"reward": 3.6363908760888237,
"creation_time": 1713964563.0842576,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799993.pt"
]
},
{
"steps": 999750,
"file_path": "results/Huggy2/Huggy/Huggy-999750.onnx",
"reward": 4.071723246377362,
"creation_time": 1713964812.6595,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999750.pt"
]
},
{
"steps": 1199870,
"file_path": "results/Huggy2/Huggy/Huggy-1199870.onnx",
"reward": 3.826645664870739,
"creation_time": 1713965061.62984,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199870.pt"
]
},
{
"steps": 1399319,
"file_path": "results/Huggy2/Huggy/Huggy-1399319.onnx",
"reward": 3.7770796420949475,
"creation_time": 1713965306.773791,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399319.pt"
]
},
{
"steps": 1599943,
"file_path": "results/Huggy2/Huggy/Huggy-1599943.onnx",
"reward": 3.8687425025578204,
"creation_time": 1713965556.981853,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599943.pt"
]
},
{
"steps": 1799617,
"file_path": "results/Huggy2/Huggy/Huggy-1799617.onnx",
"reward": 3.4617113679954685,
"creation_time": 1713965803.9902253,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799617.pt"
]
},
{
"steps": 1999800,
"file_path": "results/Huggy2/Huggy/Huggy-1999800.onnx",
"reward": 3.8725091763905115,
"creation_time": 1713966055.4014525,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999800.pt"
]
},
{
"steps": 2000550,
"file_path": "results/Huggy2/Huggy/Huggy-2000550.onnx",
"reward": 3.690076493554645,
"creation_time": 1713966055.5496693,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000550.pt"
]
}
],
"final_checkpoint": {
"steps": 2000550,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.690076493554645,
"creation_time": 1713966055.5496693,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000550.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}