{
"Huggy": {
"checkpoints": [
{
"steps": 199920,
"file_path": "results/Huggy2/Huggy/Huggy-199920.onnx",
"reward": 3.2766298227391,
"creation_time": 1744618438.6008332,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199920.pt"
]
},
{
"steps": 399895,
"file_path": "results/Huggy2/Huggy/Huggy-399895.onnx",
"reward": 3.8022617795250633,
"creation_time": 1744618678.5073745,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399895.pt"
]
},
{
"steps": 599951,
"file_path": "results/Huggy2/Huggy/Huggy-599951.onnx",
"reward": 4.03203912292208,
"creation_time": 1744618920.854637,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599951.pt"
]
},
{
"steps": 799985,
"file_path": "results/Huggy2/Huggy/Huggy-799985.onnx",
"reward": 3.8030196288052727,
"creation_time": 1744619160.8210926,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799985.pt"
]
},
{
"steps": 999975,
"file_path": "results/Huggy2/Huggy/Huggy-999975.onnx",
"reward": 3.8600020557641983,
"creation_time": 1744619402.1788704,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999975.pt"
]
},
{
"steps": 1199888,
"file_path": "results/Huggy2/Huggy/Huggy-1199888.onnx",
"reward": 3.5003357357449003,
"creation_time": 1744619644.0083082,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199888.pt"
]
},
{
"steps": 1399798,
"file_path": "results/Huggy2/Huggy/Huggy-1399798.onnx",
"reward": 3.760746565096232,
"creation_time": 1744619881.4909194,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399798.pt"
]
},
{
"steps": 1599983,
"file_path": "results/Huggy2/Huggy/Huggy-1599983.onnx",
"reward": 3.5237856357302886,
"creation_time": 1744620126.8803465,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599983.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy2/Huggy/Huggy-1799996.onnx",
"reward": 3.9189769393060265,
"creation_time": 1744620370.7946985,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999970,
"file_path": "results/Huggy2/Huggy/Huggy-1999970.onnx",
"reward": 4.503292980946992,
"creation_time": 1744620613.559636,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999970.pt"
]
},
{
"steps": 2000037,
"file_path": "results/Huggy2/Huggy/Huggy-2000037.onnx",
"reward": 4.441685730218888,
"creation_time": 1744620613.692009,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000037.pt"
]
}
],
"final_checkpoint": {
"steps": 2000037,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.441685730218888,
"creation_time": 1744620613.692009,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000037.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}