{
"Huggy": {
"checkpoints": [
{
"steps": 199734,
"file_path": "results/Huggy2/Huggy/Huggy-199734.onnx",
"reward": 3.5950988496559253,
"creation_time": 1753435615.3012595,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199734.pt"
]
},
{
"steps": 399949,
"file_path": "results/Huggy2/Huggy/Huggy-399949.onnx",
"reward": 3.6136746164914726,
"creation_time": 1753435856.090985,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399949.pt"
]
},
{
"steps": 599906,
"file_path": "results/Huggy2/Huggy/Huggy-599906.onnx",
"reward": 3.6325339593670587,
"creation_time": 1753436098.400537,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599906.pt"
]
},
{
"steps": 799980,
"file_path": "results/Huggy2/Huggy/Huggy-799980.onnx",
"reward": 3.8220983356237412,
"creation_time": 1753436339.981649,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799980.pt"
]
},
{
"steps": 999985,
"file_path": "results/Huggy2/Huggy/Huggy-999985.onnx",
"reward": 3.8423862571411944,
"creation_time": 1753436588.9444423,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999985.pt"
]
},
{
"steps": 1199985,
"file_path": "results/Huggy2/Huggy/Huggy-1199985.onnx",
"reward": 3.9729016121853604,
"creation_time": 1753436838.2215614,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199985.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy2/Huggy/Huggy-1399992.onnx",
"reward": 4.191382080316544,
"creation_time": 1753437088.86621,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599910,
"file_path": "results/Huggy2/Huggy/Huggy-1599910.onnx",
"reward": 4.032076658204544,
"creation_time": 1753437356.5700521,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599910.pt"
]
},
{
"steps": 1799972,
"file_path": "results/Huggy2/Huggy/Huggy-1799972.onnx",
"reward": 4.063101674253876,
"creation_time": 1753437608.279501,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799972.pt"
]
},
{
"steps": 1999937,
"file_path": "results/Huggy2/Huggy/Huggy-1999937.onnx",
"reward": 4.061567697952043,
"creation_time": 1753437861.1863081,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999937.pt"
]
},
{
"steps": 2000005,
"file_path": "results/Huggy2/Huggy/Huggy-2000005.onnx",
"reward": 4.054498020340414,
"creation_time": 1753437861.2972965,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000005.pt"
]
}
],
"final_checkpoint": {
"steps": 2000005,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.054498020340414,
"creation_time": 1753437861.2972965,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000005.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0",
"torch_version": "2.7.1+cu126"
}
}