{
"Huggy": {
"checkpoints": [
{
"steps": 199932,
"file_path": "results/Huggy/Huggy/Huggy-199932.onnx",
"reward": 3.064953052146094,
"creation_time": 1673176310.0145397,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199932.pt"
]
},
{
"steps": 399945,
"file_path": "results/Huggy/Huggy/Huggy-399945.onnx",
"reward": 3.842559428847566,
"creation_time": 1673176524.355222,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399945.pt"
]
},
{
"steps": 599936,
"file_path": "results/Huggy/Huggy/Huggy-599936.onnx",
"reward": 4.246101260185242,
"creation_time": 1673176738.9927256,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599936.pt"
]
},
{
"steps": 799871,
"file_path": "results/Huggy/Huggy/Huggy-799871.onnx",
"reward": 3.560505186888709,
"creation_time": 1673176952.2633069,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799871.pt"
]
},
{
"steps": 999954,
"file_path": "results/Huggy/Huggy/Huggy-999954.onnx",
"reward": 3.912847949476803,
"creation_time": 1673177171.3830538,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999954.pt"
]
},
{
"steps": 1199993,
"file_path": "results/Huggy/Huggy/Huggy-1199993.onnx",
"reward": 3.6209317445755005,
"creation_time": 1673177386.0114877,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199993.pt"
]
},
{
"steps": 1399964,
"file_path": "results/Huggy/Huggy/Huggy-1399964.onnx",
"reward": 3.72540297734192,
"creation_time": 1673177597.8409429,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399964.pt"
]
},
{
"steps": 1599966,
"file_path": "results/Huggy/Huggy/Huggy-1599966.onnx",
"reward": 3.5977618405693454,
"creation_time": 1673177811.752202,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599966.pt"
]
},
{
"steps": 1799969,
"file_path": "results/Huggy/Huggy/Huggy-1799969.onnx",
"reward": 3.555952671097546,
"creation_time": 1673178029.0742362,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799969.pt"
]
},
{
"steps": 1999971,
"file_path": "results/Huggy/Huggy/Huggy-1999971.onnx",
"reward": 3.625383899655453,
"creation_time": 1673178240.3957994,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999971.pt"
]
},
{
"steps": 2000084,
"file_path": "results/Huggy/Huggy/Huggy-2000084.onnx",
"reward": 3.64244764939898,
"creation_time": 1673178240.5244818,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000084.pt"
]
}
],
"final_checkpoint": {
"steps": 2000084,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.64244764939898,
"creation_time": 1673178240.5244818,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000084.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}