{
"Huggy": {
"checkpoints": [
{
"steps": 199568,
"file_path": "results/Huggy2/Huggy/Huggy-199568.onnx",
"reward": 3.225798040169936,
"creation_time": 1736873138.064904,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199568.pt"
]
},
{
"steps": 399502,
"file_path": "results/Huggy2/Huggy/Huggy-399502.onnx",
"reward": 3.4900276247532136,
"creation_time": 1736873379.6737823,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399502.pt"
]
},
{
"steps": 599990,
"file_path": "results/Huggy2/Huggy/Huggy-599990.onnx",
"reward": 5.5460450649261475,
"creation_time": 1736873634.374165,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599990.pt"
]
},
{
"steps": 799920,
"file_path": "results/Huggy2/Huggy/Huggy-799920.onnx",
"reward": 3.7581249380933825,
"creation_time": 1736873880.1932347,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799920.pt"
]
},
{
"steps": 999956,
"file_path": "results/Huggy2/Huggy/Huggy-999956.onnx",
"reward": 3.123145643621683,
"creation_time": 1736874133.7045841,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999956.pt"
]
},
{
"steps": 1199922,
"file_path": "results/Huggy2/Huggy/Huggy-1199922.onnx",
"reward": 4.063998014696183,
"creation_time": 1736874379.2230244,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199922.pt"
]
},
{
"steps": 1399538,
"file_path": "results/Huggy2/Huggy/Huggy-1399538.onnx",
"reward": 3.758260130561808,
"creation_time": 1736874620.4784832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399538.pt"
]
},
{
"steps": 1599265,
"file_path": "results/Huggy2/Huggy/Huggy-1599265.onnx",
"reward": 3.6949048673149445,
"creation_time": 1736874865.3344405,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599265.pt"
]
},
{
"steps": 1799989,
"file_path": "results/Huggy2/Huggy/Huggy-1799989.onnx",
"reward": 3.7565896976953264,
"creation_time": 1736875113.7929444,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799989.pt"
]
},
{
"steps": 1999933,
"file_path": "results/Huggy2/Huggy/Huggy-1999933.onnx",
"reward": 3.7317848920822145,
"creation_time": 1736875366.8657165,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999933.pt"
]
},
{
"steps": 2000007,
"file_path": "results/Huggy2/Huggy/Huggy-2000007.onnx",
"reward": 3.9121023615201316,
"creation_time": 1736875367.0433593,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000007.pt"
]
}
],
"final_checkpoint": {
"steps": 2000007,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.9121023615201316,
"creation_time": 1736875367.0433593,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000007.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}