{
"Huggy": {
"checkpoints": [
{
"steps": 199673,
"file_path": "results/Huggy2/Huggy/Huggy-199673.onnx",
"reward": 3.4802619192911233,
"creation_time": 1744785327.980485,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199673.pt"
]
},
{
"steps": 399934,
"file_path": "results/Huggy2/Huggy/Huggy-399934.onnx",
"reward": 3.7315803161689214,
"creation_time": 1744785569.3414795,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399934.pt"
]
},
{
"steps": 599885,
"file_path": "results/Huggy2/Huggy/Huggy-599885.onnx",
"reward": 3.357610815449765,
"creation_time": 1744785811.4533322,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599885.pt"
]
},
{
"steps": 799969,
"file_path": "results/Huggy2/Huggy/Huggy-799969.onnx",
"reward": 3.8512431440814847,
"creation_time": 1744786055.5338895,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799969.pt"
]
},
{
"steps": 999922,
"file_path": "results/Huggy2/Huggy/Huggy-999922.onnx",
"reward": 3.8625445073964646,
"creation_time": 1744786303.0265338,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999922.pt"
]
},
{
"steps": 1199926,
"file_path": "results/Huggy2/Huggy/Huggy-1199926.onnx",
"reward": 4.338115288054242,
"creation_time": 1744786551.2826588,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199926.pt"
]
},
{
"steps": 1399938,
"file_path": "results/Huggy2/Huggy/Huggy-1399938.onnx",
"reward": 4.404623031616211,
"creation_time": 1744786798.82183,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399938.pt"
]
},
{
"steps": 1599537,
"file_path": "results/Huggy2/Huggy/Huggy-1599537.onnx",
"reward": 3.6397245123207225,
"creation_time": 1744787042.2246354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599537.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy2/Huggy/Huggy-1799988.onnx",
"reward": 3.676146494931188,
"creation_time": 1744787290.0130806,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999753,
"file_path": "results/Huggy2/Huggy/Huggy-1999753.onnx",
"reward": 3.805484047302833,
"creation_time": 1744787535.3836513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999753.pt"
]
},
{
"steps": 2000503,
"file_path": "results/Huggy2/Huggy/Huggy-2000503.onnx",
"reward": 3.2841640966279164,
"creation_time": 1744787535.533265,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000503.pt"
]
}
],
"final_checkpoint": {
"steps": 2000503,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.2841640966279164,
"creation_time": 1744787535.533265,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000503.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}