{
"Huggy": {
"checkpoints": [
{
"steps": 199974,
"file_path": "results/Huggy/Huggy/Huggy-199974.onnx",
"reward": 3.192696846090257,
"creation_time": 1690178532.5265431,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199974.pt"
]
},
{
"steps": 399859,
"file_path": "results/Huggy/Huggy/Huggy-399859.onnx",
"reward": 4.513306953690269,
"creation_time": 1690178794.9390447,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399859.pt"
]
},
{
"steps": 599925,
"file_path": "results/Huggy/Huggy/Huggy-599925.onnx",
"reward": 3.6556775136427446,
"creation_time": 1690179058.8275516,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599925.pt"
]
},
{
"steps": 799925,
"file_path": "results/Huggy/Huggy/Huggy-799925.onnx",
"reward": 3.9090510293415615,
"creation_time": 1690179324.483843,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799925.pt"
]
},
{
"steps": 999946,
"file_path": "results/Huggy/Huggy/Huggy-999946.onnx",
"reward": 4.021956877726497,
"creation_time": 1690179595.2956505,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999946.pt"
]
},
{
"steps": 1199957,
"file_path": "results/Huggy/Huggy/Huggy-1199957.onnx",
"reward": 3.9886621996760367,
"creation_time": 1690179866.0672076,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199957.pt"
]
},
{
"steps": 1399917,
"file_path": "results/Huggy/Huggy/Huggy-1399917.onnx",
"reward": null,
"creation_time": 1690180135.0572777,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399917.pt"
]
},
{
"steps": 1599920,
"file_path": "results/Huggy/Huggy/Huggy-1599920.onnx",
"reward": 4.124009740352631,
"creation_time": 1690180399.92284,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599920.pt"
]
},
{
"steps": 1799990,
"file_path": "results/Huggy/Huggy/Huggy-1799990.onnx",
"reward": 4.083499656718912,
"creation_time": 1690180667.8013437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799990.pt"
]
},
{
"steps": 1999926,
"file_path": "results/Huggy/Huggy/Huggy-1999926.onnx",
"reward": 3.5951882045377386,
"creation_time": 1690180951.9673529,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999926.pt"
]
},
{
"steps": 2000005,
"file_path": "results/Huggy/Huggy/Huggy-2000005.onnx",
"reward": 3.6003976861635842,
"creation_time": 1690180952.197558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000005.pt"
]
}
],
"final_checkpoint": {
"steps": 2000005,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6003976861635842,
"creation_time": 1690180952.197558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000005.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}