{
"Huggy": {
"checkpoints": [
{
"steps": 199883,
"file_path": "results/Huggy2/Huggy/Huggy-199883.onnx",
"reward": 3.135871306806803,
"creation_time": 1708516760.2391202,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199883.pt"
]
},
{
"steps": 399948,
"file_path": "results/Huggy2/Huggy/Huggy-399948.onnx",
"reward": 3.4878695143593683,
"creation_time": 1708517004.3799996,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399948.pt"
]
},
{
"steps": 599993,
"file_path": "results/Huggy2/Huggy/Huggy-599993.onnx",
"reward": 4.284962264696757,
"creation_time": 1708517250.0957139,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599993.pt"
]
},
{
"steps": 799953,
"file_path": "results/Huggy2/Huggy/Huggy-799953.onnx",
"reward": 3.7226656144857406,
"creation_time": 1708517495.5117974,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799953.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy2/Huggy/Huggy-999960.onnx",
"reward": 3.9701862963182584,
"creation_time": 1708517742.9144354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199889,
"file_path": "results/Huggy2/Huggy/Huggy-1199889.onnx",
"reward": 3.637924088608651,
"creation_time": 1708517992.1608155,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199889.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy2/Huggy/Huggy-1399992.onnx",
"reward": 3.556669089529249,
"creation_time": 1708518241.6167157,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599932,
"file_path": "results/Huggy2/Huggy/Huggy-1599932.onnx",
"reward": 4.006813472540921,
"creation_time": 1708518490.5078118,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599932.pt"
]
},
{
"steps": 1799361,
"file_path": "results/Huggy2/Huggy/Huggy-1799361.onnx",
"reward": 3.869224651293321,
"creation_time": 1708518743.2545354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799361.pt"
]
},
{
"steps": 1999927,
"file_path": "results/Huggy2/Huggy/Huggy-1999927.onnx",
"reward": 3.802988812128703,
"creation_time": 1708518993.6321788,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999927.pt"
]
},
{
"steps": 2000031,
"file_path": "results/Huggy2/Huggy/Huggy-2000031.onnx",
"reward": 3.8234458126519857,
"creation_time": 1708518993.7523968,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000031.pt"
]
}
],
"final_checkpoint": {
"steps": 2000031,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8234458126519857,
"creation_time": 1708518993.7523968,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000031.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.0+cu121"
}
}