{
"Huggy": {
"checkpoints": [
{
"steps": 199953,
"file_path": "results/Huggy2/Huggy/Huggy-199953.onnx",
"reward": 3.287214601194704,
"creation_time": 1718907394.4808226,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199953.pt"
]
},
{
"steps": 399957,
"file_path": "results/Huggy2/Huggy/Huggy-399957.onnx",
"reward": 3.788900040347001,
"creation_time": 1718907655.2084377,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399957.pt"
]
},
{
"steps": 599999,
"file_path": "results/Huggy2/Huggy/Huggy-599999.onnx",
"reward": 4.3095818974755025,
"creation_time": 1718907918.5995684,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599999.pt"
]
},
{
"steps": 799956,
"file_path": "results/Huggy2/Huggy/Huggy-799956.onnx",
"reward": 3.7144138252663756,
"creation_time": 1718908183.0322442,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799956.pt"
]
},
{
"steps": 999887,
"file_path": "results/Huggy2/Huggy/Huggy-999887.onnx",
"reward": 3.908162585249892,
"creation_time": 1718908454.3043687,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999887.pt"
]
},
{
"steps": 1199943,
"file_path": "results/Huggy2/Huggy/Huggy-1199943.onnx",
"reward": 3.9303306232799184,
"creation_time": 1718908721.5407357,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199943.pt"
]
},
{
"steps": 1399969,
"file_path": "results/Huggy2/Huggy/Huggy-1399969.onnx",
"reward": 3.87090903690076,
"creation_time": 1718908994.1495047,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399969.pt"
]
},
{
"steps": 1599914,
"file_path": "results/Huggy2/Huggy/Huggy-1599914.onnx",
"reward": 3.9514385353773833,
"creation_time": 1718909269.361817,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599914.pt"
]
},
{
"steps": 1799950,
"file_path": "results/Huggy2/Huggy/Huggy-1799950.onnx",
"reward": 4.043169311501763,
"creation_time": 1718909543.7887018,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799950.pt"
]
},
{
"steps": 1999484,
"file_path": "results/Huggy2/Huggy/Huggy-1999484.onnx",
"reward": 3.4570078538811724,
"creation_time": 1718909818.1803894,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999484.pt"
]
},
{
"steps": 2000234,
"file_path": "results/Huggy2/Huggy/Huggy-2000234.onnx",
"reward": 3.2801980525255203,
"creation_time": 1718909818.335204,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000234.pt"
]
}
],
"final_checkpoint": {
"steps": 2000234,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.2801980525255203,
"creation_time": 1718909818.335204,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000234.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}