{
"Huggy": {
"checkpoints": [
{
"steps": 199955,
"file_path": "results/Huggy2/Huggy/Huggy-199955.onnx",
"reward": 3.298078626394272,
"creation_time": 1713351701.8627627,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199955.pt"
]
},
{
"steps": 399995,
"file_path": "results/Huggy2/Huggy/Huggy-399995.onnx",
"reward": 3.576456661093725,
"creation_time": 1713351935.6680725,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399995.pt"
]
},
{
"steps": 599896,
"file_path": "results/Huggy2/Huggy/Huggy-599896.onnx",
"reward": 4.16029538154602,
"creation_time": 1713352174.9721298,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599896.pt"
]
},
{
"steps": 799990,
"file_path": "results/Huggy2/Huggy/Huggy-799990.onnx",
"reward": 3.651282769441605,
"creation_time": 1713352409.128933,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799990.pt"
]
},
{
"steps": 999849,
"file_path": "results/Huggy2/Huggy/Huggy-999849.onnx",
"reward": 3.8393761030177482,
"creation_time": 1713352652.1649714,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999849.pt"
]
},
{
"steps": 1199445,
"file_path": "results/Huggy2/Huggy/Huggy-1199445.onnx",
"reward": 4.571528908610344,
"creation_time": 1713352893.118094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199445.pt"
]
},
{
"steps": 1399996,
"file_path": "results/Huggy2/Huggy/Huggy-1399996.onnx",
"reward": 3.7022098275278346,
"creation_time": 1713353132.3497317,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399996.pt"
]
},
{
"steps": 1599978,
"file_path": "results/Huggy2/Huggy/Huggy-1599978.onnx",
"reward": 3.3732883959281734,
"creation_time": 1713353368.6325817,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599978.pt"
]
},
{
"steps": 1799991,
"file_path": "results/Huggy2/Huggy/Huggy-1799991.onnx",
"reward": 3.5052062273025513,
"creation_time": 1713353606.331678,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799991.pt"
]
},
{
"steps": 1999940,
"file_path": "results/Huggy2/Huggy/Huggy-1999940.onnx",
"reward": 3.5351366107143574,
"creation_time": 1713353840.7338228,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999940.pt"
]
},
{
"steps": 2000018,
"file_path": "results/Huggy2/Huggy/Huggy-2000018.onnx",
"reward": 3.551716755054615,
"creation_time": 1713353840.8514898,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000018.pt"
]
}
],
"final_checkpoint": {
"steps": 2000018,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.551716755054615,
"creation_time": 1713353840.8514898,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000018.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}