{
"Huggy": {
"checkpoints": [
{
"steps": 199674,
"file_path": "results/Huggy/Huggy/Huggy-199674.onnx",
"reward": 3.388218302335312,
"creation_time": 1672278760.1563268,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199674.pt"
]
},
{
"steps": 399977,
"file_path": "results/Huggy/Huggy/Huggy-399977.onnx",
"reward": 3.2746806481192188,
"creation_time": 1672279204.8280058,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399977.pt"
]
},
{
"steps": 599975,
"file_path": "results/Huggy/Huggy/Huggy-599975.onnx",
"reward": 3.9699512322743735,
"creation_time": 1672279649.9008176,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599975.pt"
]
},
{
"steps": 799994,
"file_path": "results/Huggy/Huggy/Huggy-799994.onnx",
"reward": 3.790047637269467,
"creation_time": 1672280078.7594259,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799994.pt"
]
},
{
"steps": 999993,
"file_path": "results/Huggy/Huggy/Huggy-999993.onnx",
"reward": 3.5343967779324603,
"creation_time": 1672280523.1206393,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999993.pt"
]
},
{
"steps": 1199418,
"file_path": "results/Huggy/Huggy/Huggy-1199418.onnx",
"reward": 3.59804865880446,
"creation_time": 1672280976.0262952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199418.pt"
]
},
{
"steps": 1399961,
"file_path": "results/Huggy/Huggy/Huggy-1399961.onnx",
"reward": 3.331585163598532,
"creation_time": 1672281411.9728346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399961.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy/Huggy/Huggy-1599982.onnx",
"reward": 3.814023378676018,
"creation_time": 1672281868.0244539,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799964,
"file_path": "results/Huggy/Huggy/Huggy-1799964.onnx",
"reward": 4.143683462039284,
"creation_time": 1672282323.7672706,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799964.pt"
]
},
{
"steps": 1999935,
"file_path": "results/Huggy/Huggy/Huggy-1999935.onnx",
"reward": 3.4489461848748983,
"creation_time": 1672282753.1644347,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999935.pt"
]
},
{
"steps": 2000004,
"file_path": "results/Huggy/Huggy/Huggy-2000004.onnx",
"reward": 3.454611978688083,
"creation_time": 1672282753.261614,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000004.pt"
]
}
],
"final_checkpoint": {
"steps": 2000004,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.454611978688083,
"creation_time": 1672282753.261614,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000004.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}