{
"Huggy": {
"checkpoints": [
{
"steps": 199867,
"file_path": "results/Huggy/Huggy/Huggy-199867.onnx",
"reward": 3.142867455716993,
"creation_time": 1672827579.9087632,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199867.pt"
]
},
{
"steps": 399879,
"file_path": "results/Huggy/Huggy/Huggy-399879.onnx",
"reward": 3.5338033951562027,
"creation_time": 1672827769.4175994,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399879.pt"
]
},
{
"steps": 599956,
"file_path": "results/Huggy/Huggy/Huggy-599956.onnx",
"reward": 4.080960056998513,
"creation_time": 1672827961.4305594,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599956.pt"
]
},
{
"steps": 799640,
"file_path": "results/Huggy/Huggy/Huggy-799640.onnx",
"reward": 3.768217671785923,
"creation_time": 1672828152.3620288,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799640.pt"
]
},
{
"steps": 999991,
"file_path": "results/Huggy/Huggy/Huggy-999991.onnx",
"reward": 3.5911602580547335,
"creation_time": 1672828345.980938,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999991.pt"
]
},
{
"steps": 1199948,
"file_path": "results/Huggy/Huggy/Huggy-1199948.onnx",
"reward": 3.4128873318433763,
"creation_time": 1672828540.220722,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199948.pt"
]
},
{
"steps": 1399910,
"file_path": "results/Huggy/Huggy/Huggy-1399910.onnx",
"reward": 3.835081160068512,
"creation_time": 1672828733.816257,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399910.pt"
]
},
{
"steps": 1599629,
"file_path": "results/Huggy/Huggy/Huggy-1599629.onnx",
"reward": 3.8157325357687277,
"creation_time": 1672828928.36735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599629.pt"
]
},
{
"steps": 1799973,
"file_path": "results/Huggy/Huggy/Huggy-1799973.onnx",
"reward": 4.097343473665176,
"creation_time": 1672829123.6667466,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799973.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy/Huggy/Huggy-1999978.onnx",
"reward": 3.498386130431773,
"creation_time": 1672829314.2203395,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000099,
"file_path": "results/Huggy/Huggy/Huggy-2000099.onnx",
"reward": 3.5134199003583375,
"creation_time": 1672829314.2988496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000099.pt"
]
}
],
"final_checkpoint": {
"steps": 2000099,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5134199003583375,
"creation_time": 1672829314.2988496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000099.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.13.1+cu116"
}
}