{
"Huggy": {
"checkpoints": [
{
"steps": 199991,
"file_path": "results/Huggy/Huggy/Huggy-199991.onnx",
"reward": 3.375974967408536,
"creation_time": 1698743850.6424203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199991.pt"
]
},
{
"steps": 399927,
"file_path": "results/Huggy/Huggy/Huggy-399927.onnx",
"reward": 3.595268755742948,
"creation_time": 1698744078.1482995,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399927.pt"
]
},
{
"steps": 599894,
"file_path": "results/Huggy/Huggy/Huggy-599894.onnx",
"reward": 3.728753674488801,
"creation_time": 1698744308.714109,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599894.pt"
]
},
{
"steps": 799961,
"file_path": "results/Huggy/Huggy/Huggy-799961.onnx",
"reward": 3.903100172962461,
"creation_time": 1698744537.6586175,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799961.pt"
]
},
{
"steps": 999940,
"file_path": "results/Huggy/Huggy/Huggy-999940.onnx",
"reward": 3.919544122806967,
"creation_time": 1698744769.3233225,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999940.pt"
]
},
{
"steps": 1199995,
"file_path": "results/Huggy/Huggy/Huggy-1199995.onnx",
"reward": 3.884939282498461,
"creation_time": 1698745001.8123534,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199995.pt"
]
},
{
"steps": 1399996,
"file_path": "results/Huggy/Huggy/Huggy-1399996.onnx",
"reward": 3.7979475351480336,
"creation_time": 1698745237.1110363,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399996.pt"
]
},
{
"steps": 1599957,
"file_path": "results/Huggy/Huggy/Huggy-1599957.onnx",
"reward": 4.021838749924751,
"creation_time": 1698745466.7102754,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599957.pt"
]
},
{
"steps": 1799999,
"file_path": "results/Huggy/Huggy/Huggy-1799999.onnx",
"reward": 3.676902900967333,
"creation_time": 1698745700.4576156,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799999.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
"reward": 3.6543792067347347,
"creation_time": 1698745935.8123012,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000042,
"file_path": "results/Huggy/Huggy/Huggy-2000042.onnx",
"reward": 3.641928482055664,
"creation_time": 1698745935.9179869,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
],
"final_checkpoint": {
"steps": 2000042,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.641928482055664,
"creation_time": 1698745935.9179869,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}