{
"Huggy": {
"checkpoints": [
{
"steps": 199936,
"file_path": "results/Huggy/Huggy/Huggy-199936.onnx",
"reward": 3.5837768241763115,
"creation_time": 1702225673.7399218,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199936.pt"
]
},
{
"steps": 399832,
"file_path": "results/Huggy/Huggy/Huggy-399832.onnx",
"reward": 4.050391882213194,
"creation_time": 1702225907.039832,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399832.pt"
]
},
{
"steps": 599940,
"file_path": "results/Huggy/Huggy/Huggy-599940.onnx",
"reward": 3.5966106982066712,
"creation_time": 1702226141.9662817,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599940.pt"
]
},
{
"steps": 799990,
"file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
"reward": 3.8665081979353215,
"creation_time": 1702226372.925509,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799990.pt"
]
},
{
"steps": 999911,
"file_path": "results/Huggy/Huggy/Huggy-999911.onnx",
"reward": 3.852497242093086,
"creation_time": 1702226605.2699358,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999911.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy/Huggy/Huggy-1199999.onnx",
"reward": 3.9107765060790043,
"creation_time": 1702226840.0682411,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399886,
"file_path": "results/Huggy/Huggy/Huggy-1399886.onnx",
"reward": 3.806372381093209,
"creation_time": 1702227071.4530206,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399886.pt"
]
},
{
"steps": 1599904,
"file_path": "results/Huggy/Huggy/Huggy-1599904.onnx",
"reward": 3.6531164840210315,
"creation_time": 1702227307.9781077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599904.pt"
]
},
{
"steps": 1799942,
"file_path": "results/Huggy/Huggy/Huggy-1799942.onnx",
"reward": 3.607348253991869,
"creation_time": 1702227543.5220451,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799942.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy/Huggy/Huggy-1999991.onnx",
"reward": 3.908694363794019,
"creation_time": 1702227773.2490504,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000066,
"file_path": "results/Huggy/Huggy/Huggy-2000066.onnx",
"reward": 3.9175500636681533,
"creation_time": 1702227773.3514686,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000066.pt"
]
}
],
"final_checkpoint": {
"steps": 2000066,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9175500636681533,
"creation_time": 1702227773.3514686,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000066.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}