{
"Huggy": {
"checkpoints": [
{
"steps": 199967,
"file_path": "results/Huggy2/Huggy/Huggy-199967.onnx",
"reward": 3.3851566893467004,
"creation_time": 1754126910.2228956,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399849,
"file_path": "results/Huggy2/Huggy/Huggy-399849.onnx",
"reward": 3.836982261716274,
"creation_time": 1754127193.398261,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399849.pt"
]
},
{
"steps": 599915,
"file_path": "results/Huggy2/Huggy/Huggy-599915.onnx",
"reward": 3.2142979273429284,
"creation_time": 1754127480.6661153,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599915.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy2/Huggy/Huggy-799934.onnx",
"reward": 4.005075302057796,
"creation_time": 1754127769.7862124,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999505,
"file_path": "results/Huggy2/Huggy/Huggy-999505.onnx",
"reward": 3.9850825667381287,
"creation_time": 1754128059.372016,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999505.pt"
]
},
{
"steps": 1199963,
"file_path": "results/Huggy2/Huggy/Huggy-1199963.onnx",
"reward": 3.825408433613024,
"creation_time": 1754128347.244607,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199963.pt"
]
},
{
"steps": 1399982,
"file_path": "results/Huggy2/Huggy/Huggy-1399982.onnx",
"reward": 4.985818147659302,
"creation_time": 1754128634.8988838,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399982.pt"
]
},
{
"steps": 1599934,
"file_path": "results/Huggy2/Huggy/Huggy-1599934.onnx",
"reward": 3.8320785385111105,
"creation_time": 1754128925.0727158,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599934.pt"
]
},
{
"steps": 1799946,
"file_path": "results/Huggy2/Huggy/Huggy-1799946.onnx",
"reward": 3.655827819072094,
"creation_time": 1754129217.9841034,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799946.pt"
]
},
{
"steps": 1999938,
"file_path": "results/Huggy2/Huggy/Huggy-1999938.onnx",
"reward": 3.4213191781725203,
"creation_time": 1754129508.8396757,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999938.pt"
]
},
{
"steps": 2000688,
"file_path": "results/Huggy2/Huggy/Huggy-2000688.onnx",
"reward": 3.2322127289242215,
"creation_time": 1754129508.9799578,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000688.pt"
]
}
],
"final_checkpoint": {
"steps": 2000688,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.2322127289242215,
"creation_time": 1754129508.9799578,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000688.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}