{
"Huggy": {
"checkpoints": [
{
"steps": 199779,
"file_path": "results/Huggy2/Huggy/Huggy-199779.onnx",
"reward": 3.4008898630476834,
"creation_time": 1751584874.933867,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199779.pt"
]
},
{
"steps": 399983,
"file_path": "results/Huggy2/Huggy/Huggy-399983.onnx",
"reward": 3.617083496610883,
"creation_time": 1751585119.177857,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399983.pt"
]
},
{
"steps": 599913,
"file_path": "results/Huggy2/Huggy/Huggy-599913.onnx",
"reward": 4.278756111860275,
"creation_time": 1751585372.668892,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599913.pt"
]
},
{
"steps": 799958,
"file_path": "results/Huggy2/Huggy/Huggy-799958.onnx",
"reward": 3.8793137669563293,
"creation_time": 1751585619.637184,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799958.pt"
]
},
{
"steps": 999979,
"file_path": "results/Huggy2/Huggy/Huggy-999979.onnx",
"reward": 4.026319674270995,
"creation_time": 1751585872.3475318,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999979.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy2/Huggy/Huggy-1199976.onnx",
"reward": 3.880790306174237,
"creation_time": 1751586125.4299471,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399881,
"file_path": "results/Huggy2/Huggy/Huggy-1399881.onnx",
"reward": 4.735854307810466,
"creation_time": 1751586378.1175296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399881.pt"
]
},
{
"steps": 1599500,
"file_path": "results/Huggy2/Huggy/Huggy-1599500.onnx",
"reward": 3.9140352158856815,
"creation_time": 1751586628.7853296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599500.pt"
]
},
{
"steps": 1799928,
"file_path": "results/Huggy2/Huggy/Huggy-1799928.onnx",
"reward": 3.8187499694407934,
"creation_time": 1751586882.3464258,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799928.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy2/Huggy/Huggy-1999991.onnx",
"reward": 4.171113615450651,
"creation_time": 1751587136.3425477,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000086,
"file_path": "results/Huggy2/Huggy/Huggy-2000086.onnx",
"reward": 4.165574276700933,
"creation_time": 1751587136.4534051,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000086.pt"
]
}
],
"final_checkpoint": {
"steps": 2000086,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.165574276700933,
"creation_time": 1751587136.4534051,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000086.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}