ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 6051,
                "file_path": "results/Huggy/Huggy/Huggy-6051.onnx",
                "reward": 1.6526724427938462,
                "creation_time": 1688516056.731591,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-6051.pt"
                ]
            },
            {
                "steps": 199941,
                "file_path": "results/Huggy/Huggy/Huggy-199941.onnx",
                "reward": 3.2149056868675427,
                "creation_time": 1688516292.5034943,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199941.pt"
                ]
            },
            {
                "steps": 399989,
                "file_path": "results/Huggy/Huggy/Huggy-399989.onnx",
                "reward": 4.82896106893366,
                "creation_time": 1688516522.232258,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399989.pt"
                ]
            },
            {
                "steps": 599963,
                "file_path": "results/Huggy/Huggy/Huggy-599963.onnx",
                "reward": 3.8327649915919584,
                "creation_time": 1688516752.002756,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599963.pt"
                ]
            },
            {
                "steps": 799962,
                "file_path": "results/Huggy/Huggy/Huggy-799962.onnx",
                "reward": 3.69189544695395,
                "creation_time": 1688516985.366939,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799962.pt"
                ]
            },
            {
                "steps": 999936,
                "file_path": "results/Huggy/Huggy/Huggy-999936.onnx",
                "reward": 4.035131003612127,
                "creation_time": 1688517216.3872051,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999936.pt"
                ]
            },
            {
                "steps": 1199965,
                "file_path": "results/Huggy/Huggy/Huggy-1199965.onnx",
                "reward": 3.3587620390786066,
                "creation_time": 1688517451.6999729,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199965.pt"
                ]
            },
            {
                "steps": 1399926,
                "file_path": "results/Huggy/Huggy/Huggy-1399926.onnx",
                "reward": 3.5551410582330494,
                "creation_time": 1688517683.5367837,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399926.pt"
                ]
            },
            {
                "steps": 1599366,
                "file_path": "results/Huggy/Huggy/Huggy-1599366.onnx",
                "reward": 3.806959377001908,
                "creation_time": 1688517918.485986,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599366.pt"
                ]
            },
            {
                "steps": 1799973,
                "file_path": "results/Huggy/Huggy/Huggy-1799973.onnx",
                "reward": 3.70026140269779,
                "creation_time": 1688518156.3879275,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799973.pt"
                ]
            },
            {
                "steps": 1999900,
                "file_path": "results/Huggy/Huggy/Huggy-1999900.onnx",
                "reward": 3.4040366004965157,
                "creation_time": 1688518385.92978,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999900.pt"
                ]
            },
            {
                "steps": 2000023,
                "file_path": "results/Huggy/Huggy/Huggy-2000023.onnx",
                "reward": 3.408464414543576,
                "creation_time": 1688518386.0521944,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000023.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000023,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.408464414543576,
            "creation_time": 1688518386.0521944,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000023.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
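
For reference, below is a minimal sketch (not part of the repository) of how this ML-Agents run log can be consumed. It assumes the file is read from run_logs/training_status.json relative to the repository root and uses only the Python standard library; the path and the snippet are illustrative.

import json

# Load the ML-Agents training status summarised above.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# List every intermediate checkpoint with its step count and reward.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>9} steps  reward {ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')

# Compare the exported final model with the best-rewarded intermediate checkpoint.
best = max(status["Huggy"]["checkpoints"], key=lambda c: c["reward"])
final = status["Huggy"]["final_checkpoint"]
print("best  :", best["file_path"], "reward", best["reward"])
print("final :", final["file_path"], "reward", final["reward"])

In this run the exported final model (results/Huggy/Huggy.onnx) corresponds to the 2000023-step checkpoint, while the highest-reward intermediate checkpoint is the one at 399989 steps.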