{
"Huggy": {
"checkpoints": [
{
"steps": 199855,
"file_path": "results/Huggy/Huggy/Huggy-199855.onnx",
"reward": 3.438227671198547,
"creation_time": 1679774569.4322953,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199855.pt"
]
},
{
"steps": 399953,
"file_path": "results/Huggy/Huggy/Huggy-399953.onnx",
"reward": 3.7053000906057525,
"creation_time": 1679774844.1286967,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399953.pt"
]
},
{
"steps": 599960,
"file_path": "results/Huggy/Huggy/Huggy-599960.onnx",
"reward": 3.7622969269752504,
"creation_time": 1679775123.7099218,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599960.pt"
]
},
{
"steps": 799958,
"file_path": "results/Huggy/Huggy/Huggy-799958.onnx",
"reward": 3.955744524357727,
"creation_time": 1679775400.7244437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799958.pt"
]
},
{
"steps": 999994,
"file_path": "results/Huggy/Huggy/Huggy-999994.onnx",
"reward": 3.831655774296833,
"creation_time": 1679775684.3767495,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999994.pt"
]
},
{
"steps": 1199931,
"file_path": "results/Huggy/Huggy/Huggy-1199931.onnx",
"reward": 3.738777709678865,
"creation_time": 1679775948.848943,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199931.pt"
]
},
{
"steps": 1399957,
"file_path": "results/Huggy/Huggy/Huggy-1399957.onnx",
"reward": 3.9430530361139056,
"creation_time": 1679776207.042882,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399957.pt"
]
},
{
"steps": 1599933,
"file_path": "results/Huggy/Huggy/Huggy-1599933.onnx",
"reward": 3.725927989413268,
"creation_time": 1679776477.5261815,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599933.pt"
]
},
{
"steps": 1799960,
"file_path": "results/Huggy/Huggy/Huggy-1799960.onnx",
"reward": 3.7688860334455967,
"creation_time": 1679776753.3469777,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799960.pt"
]
},
{
"steps": 1999755,
"file_path": "results/Huggy/Huggy/Huggy-1999755.onnx",
"reward": 2.9631230235099792,
"creation_time": 1679777012.2363634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999755.pt"
]
},
{
"steps": 2000505,
"file_path": "results/Huggy/Huggy/Huggy-2000505.onnx",
"reward": 2.2660498883989124,
"creation_time": 1679777012.3928595,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000505.pt"
]
}
],
"final_checkpoint": {
"steps": 2000505,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.2660498883989124,
"creation_time": 1679777012.3928595,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000505.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}