{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199868,
        "file_path": "results/Huggy2/Huggy/Huggy-199868.onnx",
        "reward": 3.40694127480189,
        "creation_time": 1742034674.668337,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199868.pt"
        ]
      },
      {
        "steps": 399837,
        "file_path": "results/Huggy2/Huggy/Huggy-399837.onnx",
        "reward": 4.033740682848569,
        "creation_time": 1742034926.191265,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399837.pt"
        ]
      },
      {
        "steps": 599913,
        "file_path": "results/Huggy2/Huggy/Huggy-599913.onnx",
        "reward": 4.009097364815799,
        "creation_time": 1742035182.002662,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599913.pt"
        ]
      },
      {
        "steps": 799990,
        "file_path": "results/Huggy2/Huggy/Huggy-799990.onnx",
        "reward": 3.9319140565831487,
        "creation_time": 1742035434.25791,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799990.pt"
        ]
      },
      {
        "steps": 999996,
        "file_path": "results/Huggy2/Huggy/Huggy-999996.onnx",
        "reward": 3.7851544557330765,
        "creation_time": 1742035686.8635113,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999996.pt"
        ]
      },
      {
        "steps": 1199963,
        "file_path": "results/Huggy2/Huggy/Huggy-1199963.onnx",
        "reward": 3.5325535299449133,
        "creation_time": 1742035942.996449,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199963.pt"
        ]
      },
      {
        "steps": 1399996,
        "file_path": "results/Huggy2/Huggy/Huggy-1399996.onnx",
        "reward": 3.6758600311434786,
        "creation_time": 1742036192.1269994,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399996.pt"
        ]
      },
      {
        "steps": 1599971,
        "file_path": "results/Huggy2/Huggy/Huggy-1599971.onnx",
        "reward": 3.818970479182343,
        "creation_time": 1742036449.337966,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599971.pt"
        ]
      },
      {
        "steps": 1799967,
        "file_path": "results/Huggy2/Huggy/Huggy-1799967.onnx",
        "reward": 3.19503294137808,
        "creation_time": 1742036709.2139692,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799967.pt"
        ]
      },
      {
        "steps": 1999945,
        "file_path": "results/Huggy2/Huggy/Huggy-1999945.onnx",
        "reward": 3.6637833073957644,
        "creation_time": 1742036956.786206,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999945.pt"
        ]
      },
      {
        "steps": 2000034,
        "file_path": "results/Huggy2/Huggy/Huggy-2000034.onnx",
        "reward": 3.668941957230615,
        "creation_time": 1742036956.9032607,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000034.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000034,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.668941957230615,
      "creation_time": 1742036956.9032607,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000034.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.6.0+cu124"
  }
}