{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199968,
        "file_path": "results/Huggy/Huggy/Huggy-199968.onnx",
        "reward": 3.433215477527716,
        "creation_time": 1683605168.4924276,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199968.pt"
        ]
      },
      {
        "steps": 399990,
        "file_path": "results/Huggy/Huggy/Huggy-399990.onnx",
        "reward": 3.7959498250693606,
        "creation_time": 1683605406.9639409,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399990.pt"
        ]
      },
      {
        "steps": 599962,
        "file_path": "results/Huggy/Huggy/Huggy-599962.onnx",
        "reward": 3.0897852778434753,
        "creation_time": 1683605649.2613385,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599962.pt"
        ]
      },
      {
        "steps": 799875,
        "file_path": "results/Huggy/Huggy/Huggy-799875.onnx",
        "reward": 3.897483345076262,
        "creation_time": 1683605885.4499054,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799875.pt"
        ]
      },
      {
        "steps": 999809,
        "file_path": "results/Huggy/Huggy/Huggy-999809.onnx",
        "reward": 3.7266083618868953,
        "creation_time": 1683606127.0647922,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999809.pt"
        ]
      },
      {
        "steps": 1199650,
        "file_path": "results/Huggy/Huggy/Huggy-1199650.onnx",
        "reward": 4.151005198315876,
        "creation_time": 1683606371.2864456,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199650.pt"
        ]
      },
      {
        "steps": 1399963,
        "file_path": "results/Huggy/Huggy/Huggy-1399963.onnx",
        "reward": 3.6064202817001005,
        "creation_time": 1683606606.094542,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399963.pt"
        ]
      },
      {
        "steps": 1599975,
        "file_path": "results/Huggy/Huggy/Huggy-1599975.onnx",
        "reward": 3.877130535352661,
        "creation_time": 1683606843.8534088,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599975.pt"
        ]
      },
      {
        "steps": 1799899,
        "file_path": "results/Huggy/Huggy/Huggy-1799899.onnx",
        "reward": 3.646409932996186,
        "creation_time": 1683607086.0280707,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799899.pt"
        ]
      },
      {
        "steps": 1999948,
        "file_path": "results/Huggy/Huggy/Huggy-1999948.onnx",
        "reward": 3.9171302169561386,
        "creation_time": 1683607325.0312169,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999948.pt"
        ]
      },
      {
        "steps": 2000698,
        "file_path": "results/Huggy/Huggy/Huggy-2000698.onnx",
        "reward": 3.106169104576111,
        "creation_time": 1683607325.1960535,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000698.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000698,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.106169104576111,
      "creation_time": 1683607325.1960535,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000698.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}