{
"Huggy": {
"checkpoints": [
{
"steps": 199942,
"file_path": "results/Huggy2/Huggy/Huggy-199942.onnx",
"reward": 3.0815448471478053,
"creation_time": 1738852726.7559924,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199942.pt"
]
},
{
"steps": 399996,
"file_path": "results/Huggy2/Huggy/Huggy-399996.onnx",
"reward": 3.9033438778314435,
"creation_time": 1738852977.5044062,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399996.pt"
]
},
{
"steps": 599902,
"file_path": "results/Huggy2/Huggy/Huggy-599902.onnx",
"reward": 4.380771807261875,
"creation_time": 1738853236.345272,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599902.pt"
]
},
{
"steps": 799935,
"file_path": "results/Huggy2/Huggy/Huggy-799935.onnx",
"reward": 3.8220277013200703,
"creation_time": 1738853492.7727232,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799935.pt"
]
},
{
"steps": 999937,
"file_path": "results/Huggy2/Huggy/Huggy-999937.onnx",
"reward": 3.837641586793349,
"creation_time": 1738853748.3348107,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999937.pt"
]
},
{
"steps": 1199989,
"file_path": "results/Huggy2/Huggy/Huggy-1199989.onnx",
"reward": 4.002841569226364,
"creation_time": 1738854003.5166438,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199989.pt"
]
},
{
"steps": 1399999,
"file_path": "results/Huggy2/Huggy/Huggy-1399999.onnx",
"reward": 3.719712451884621,
"creation_time": 1738854261.1111724,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399999.pt"
]
},
{
"steps": 1599947,
"file_path": "results/Huggy2/Huggy/Huggy-1599947.onnx",
"reward": 3.8882498071548786,
"creation_time": 1738854512.9945905,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599947.pt"
]
},
{
"steps": 1799899,
"file_path": "results/Huggy2/Huggy/Huggy-1799899.onnx",
"reward": 4.015378337494934,
"creation_time": 1738854771.571635,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799899.pt"
]
},
{
"steps": 1999960,
"file_path": "results/Huggy2/Huggy/Huggy-1999960.onnx",
"reward": 3.924427318847042,
"creation_time": 1738855027.1093514,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999960.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy2/Huggy/Huggy-2000043.onnx",
"reward": 3.92352718250318,
"creation_time": 1738855027.243883,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.92352718250318,
"creation_time": 1738855027.243883,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}