{
"Huggy": {
"checkpoints": [
{
"steps": 199717,
"file_path": "results/Huggy2/Huggy/Huggy-199717.onnx",
"reward": 3.2899899794941856,
"creation_time": 1743617564.2301993,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199717.pt"
]
},
{
"steps": 399958,
"file_path": "results/Huggy2/Huggy/Huggy-399958.onnx",
"reward": 3.9114663460675407,
"creation_time": 1743617813.614271,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399958.pt"
]
},
{
"steps": 599926,
"file_path": "results/Huggy2/Huggy/Huggy-599926.onnx",
"reward": 2.9564470811323686,
"creation_time": 1743618066.7285366,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599926.pt"
]
},
{
"steps": 799908,
"file_path": "results/Huggy2/Huggy/Huggy-799908.onnx",
"reward": 4.050975195981644,
"creation_time": 1743618324.3606427,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799908.pt"
]
},
{
"steps": 999996,
"file_path": "results/Huggy2/Huggy/Huggy-999996.onnx",
"reward": 3.8919638942058823,
"creation_time": 1743618590.1510732,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999996.pt"
]
},
{
"steps": 1199919,
"file_path": "results/Huggy2/Huggy/Huggy-1199919.onnx",
"reward": 4.0112459109975145,
"creation_time": 1743618850.626702,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199919.pt"
]
},
{
"steps": 1399925,
"file_path": "results/Huggy2/Huggy/Huggy-1399925.onnx",
"reward": 3.6100697896697302,
"creation_time": 1743619111.1970592,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399925.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy2/Huggy/Huggy-1599982.onnx",
"reward": 3.722372254855196,
"creation_time": 1743619369.1817908,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799958,
"file_path": "results/Huggy2/Huggy/Huggy-1799958.onnx",
"reward": 3.7983980321365856,
"creation_time": 1743619625.1291473,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799958.pt"
]
},
{
"steps": 1999765,
"file_path": "results/Huggy2/Huggy/Huggy-1999765.onnx",
"reward": 3.8888705631960994,
"creation_time": 1743619880.3872957,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999765.pt"
]
},
{
"steps": 2000515,
"file_path": "results/Huggy2/Huggy/Huggy-2000515.onnx",
"reward": 3.7749858166490284,
"creation_time": 1743619880.7177215,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000515.pt"
]
}
],
"final_checkpoint": {
"steps": 2000515,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7749858166490284,
"creation_time": 1743619880.7177215,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000515.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}