{
"Huggy": {
"checkpoints": [
{
"steps": 199893,
"file_path": "results/Huggy2/Huggy/Huggy-199893.onnx",
"reward": 3.766396440565586,
"creation_time": 1746261728.6188877,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199893.pt"
]
},
{
"steps": 399947,
"file_path": "results/Huggy2/Huggy/Huggy-399947.onnx",
"reward": 3.971951260648925,
"creation_time": 1746261961.9309087,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399947.pt"
]
},
{
"steps": 599963,
"file_path": "results/Huggy2/Huggy/Huggy-599963.onnx",
"reward": 3.659182755570663,
"creation_time": 1746262199.5777643,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599963.pt"
]
},
{
"steps": 799262,
"file_path": "results/Huggy2/Huggy/Huggy-799262.onnx",
"reward": 3.8178982288425507,
"creation_time": 1746262438.5052586,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799262.pt"
]
},
{
"steps": 999897,
"file_path": "results/Huggy2/Huggy/Huggy-999897.onnx",
"reward": 4.036661062184281,
"creation_time": 1746262680.0315738,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999897.pt"
]
},
{
"steps": 1199925,
"file_path": "results/Huggy2/Huggy/Huggy-1199925.onnx",
"reward": 3.9344036432412954,
"creation_time": 1746262927.0151463,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199925.pt"
]
},
{
"steps": 1399953,
"file_path": "results/Huggy2/Huggy/Huggy-1399953.onnx",
"reward": 3.722047786915164,
"creation_time": 1746263168.343705,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399953.pt"
]
},
{
"steps": 1599936,
"file_path": "results/Huggy2/Huggy/Huggy-1599936.onnx",
"reward": 3.678568720451893,
"creation_time": 1746263411.5157323,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599936.pt"
]
},
{
"steps": 1799804,
"file_path": "results/Huggy2/Huggy/Huggy-1799804.onnx",
"reward": 3.9059437964207087,
"creation_time": 1746263656.6873446,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799804.pt"
]
},
{
"steps": 1999964,
"file_path": "results/Huggy2/Huggy/Huggy-1999964.onnx",
"reward": 3.2853254079818726,
"creation_time": 1746263901.8107615,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999964.pt"
]
},
{
"steps": 2000021,
"file_path": "results/Huggy2/Huggy/Huggy-2000021.onnx",
"reward": 3.3413347403208413,
"creation_time": 1746263901.9180458,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000021.pt"
]
}
],
"final_checkpoint": {
"steps": 2000021,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.3413347403208413,
"creation_time": 1746263901.9180458,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000021.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}