{
"Huggy": {
"checkpoints": [
{
"steps": 199815,
"file_path": "results/Huggy2/Huggy/Huggy-199815.onnx",
"reward": 3.163357090204954,
"creation_time": 1733082800.5599358,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199815.pt"
]
},
{
"steps": 399902,
"file_path": "results/Huggy2/Huggy/Huggy-399902.onnx",
"reward": 3.4277140011250133,
"creation_time": 1733083068.0435853,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399902.pt"
]
},
{
"steps": 599982,
"file_path": "results/Huggy2/Huggy/Huggy-599982.onnx",
"reward": 3.94407363070382,
"creation_time": 1733083337.0757961,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599982.pt"
]
},
{
"steps": 799844,
"file_path": "results/Huggy2/Huggy/Huggy-799844.onnx",
"reward": 3.764042385135378,
"creation_time": 1733083603.203709,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799844.pt"
]
},
{
"steps": 999938,
"file_path": "results/Huggy2/Huggy/Huggy-999938.onnx",
"reward": 4.226468920707703,
"creation_time": 1733083867.9130957,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999938.pt"
]
},
{
"steps": 1199985,
"file_path": "results/Huggy2/Huggy/Huggy-1199985.onnx",
"reward": 4.0859438066613185,
"creation_time": 1733084126.291287,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199985.pt"
]
},
{
"steps": 1399954,
"file_path": "results/Huggy2/Huggy/Huggy-1399954.onnx",
"reward": 3.5854161580403647,
"creation_time": 1733084392.5797765,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399954.pt"
]
},
{
"steps": 1599866,
"file_path": "results/Huggy2/Huggy/Huggy-1599866.onnx",
"reward": 4.017861785115422,
"creation_time": 1733084656.2247522,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599866.pt"
]
},
{
"steps": 1799987,
"file_path": "results/Huggy2/Huggy/Huggy-1799987.onnx",
"reward": 3.8107588754759893,
"creation_time": 1733084922.5872297,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799987.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
"reward": 3.5641276597976685,
"creation_time": 1733085183.769092,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000054,
"file_path": "results/Huggy2/Huggy/Huggy-2000054.onnx",
"reward": 3.521858761950237,
"creation_time": 1733085183.9000094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000054.pt"
]
}
],
"final_checkpoint": {
"steps": 2000054,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.521858761950237,
"creation_time": 1733085183.9000094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000054.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}