{
"Huggy": {
"checkpoints": [
{
"steps": 199908,
"file_path": "results/Huggy_1/Huggy/Huggy-199908.onnx",
"reward": 3.167510991227137,
"creation_time": 1671135002.7330427,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-199908.pt"
]
},
{
"steps": 399881,
"file_path": "results/Huggy_1/Huggy/Huggy-399881.onnx",
"reward": 3.549771926174425,
"creation_time": 1671135224.471385,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-399881.pt"
]
},
{
"steps": 599988,
"file_path": "results/Huggy_1/Huggy/Huggy-599988.onnx",
"reward": 3.7410019215415504,
"creation_time": 1671135451.0136318,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-599988.pt"
]
},
{
"steps": 799975,
"file_path": "results/Huggy_1/Huggy/Huggy-799975.onnx",
"reward": 3.7945296757895015,
"creation_time": 1671135676.3133225,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-799975.pt"
]
},
{
"steps": 999986,
"file_path": "results/Huggy_1/Huggy/Huggy-999986.onnx",
"reward": 3.7883526428540546,
"creation_time": 1671135908.212385,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-999986.pt"
]
},
{
"steps": 1199956,
"file_path": "results/Huggy_1/Huggy/Huggy-1199956.onnx",
"reward": 3.9851347171154217,
"creation_time": 1671136137.1533866,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-1199956.pt"
]
},
{
"steps": 1399962,
"file_path": "results/Huggy_1/Huggy/Huggy-1399962.onnx",
"reward": 4.231970875263214,
"creation_time": 1671136361.887324,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-1399962.pt"
]
},
{
"steps": 1599987,
"file_path": "results/Huggy_1/Huggy/Huggy-1599987.onnx",
"reward": 3.8669480930727262,
"creation_time": 1671136583.390927,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-1599987.pt"
]
},
{
"steps": 1799984,
"file_path": "results/Huggy_1/Huggy/Huggy-1799984.onnx",
"reward": 3.7126698372885585,
"creation_time": 1671136811.8308952,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-1799984.pt"
]
},
{
"steps": 1999988,
"file_path": "results/Huggy_1/Huggy/Huggy-1999988.onnx",
"reward": 4.036154627799988,
"creation_time": 1671137036.2856255,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-1999988.pt"
]
},
{
"steps": 2000040,
"file_path": "results/Huggy_1/Huggy/Huggy-2000040.onnx",
"reward": 4.000109920899074,
"creation_time": 1671137036.408669,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-2000040.pt"
]
}
],
"final_checkpoint": {
"steps": 2000040,
"file_path": "results/Huggy_1/Huggy.onnx",
"reward": 4.000109920899074,
"creation_time": 1671137036.408669,
"auxillary_file_paths": [
"results/Huggy_1/Huggy/Huggy-2000040.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}