{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199754,
        "file_path": "results/Huggy2/Huggy/Huggy-199754.onnx",
        "reward": 3.421825357278188,
        "creation_time": 1755768717.354019,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199754.pt"
        ]
      },
      {
        "steps": 399890,
        "file_path": "results/Huggy2/Huggy/Huggy-399890.onnx",
        "reward": 3.4561519643007697,
        "creation_time": 1755768987.349017,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399890.pt"
        ]
      },
      {
        "steps": 599747,
        "file_path": "results/Huggy2/Huggy/Huggy-599747.onnx",
        "reward": 3.650289820300208,
        "creation_time": 1755769262.475748,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599747.pt"
        ]
      },
      {
        "steps": 799944,
        "file_path": "results/Huggy2/Huggy/Huggy-799944.onnx",
        "reward": 3.8640568009738265,
        "creation_time": 1755769530.4406004,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799944.pt"
        ]
      },
      {
        "steps": 999985,
        "file_path": "results/Huggy2/Huggy/Huggy-999985.onnx",
        "reward": 3.978462128135247,
        "creation_time": 1755769802.142576,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999985.pt"
        ]
      },
      {
        "steps": 1199932,
        "file_path": "results/Huggy2/Huggy/Huggy-1199932.onnx",
        "reward": 3.6044062386666025,
        "creation_time": 1755770078.2773936,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199932.pt"
        ]
      },
      {
        "steps": 1399968,
        "file_path": "results/Huggy2/Huggy/Huggy-1399968.onnx",
        "reward": 3.9204754166305067,
        "creation_time": 1755770350.6318007,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399968.pt"
        ]
      },
      {
        "steps": 1599730,
        "file_path": "results/Huggy2/Huggy/Huggy-1599730.onnx",
        "reward": 3.9009053081274034,
        "creation_time": 1755770627.5019577,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599730.pt"
        ]
      },
      {
        "steps": 1799969,
        "file_path": "results/Huggy2/Huggy/Huggy-1799969.onnx",
        "reward": 3.7592488851012855,
        "creation_time": 1755770905.4259365,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799969.pt"
        ]
      },
      {
        "steps": 1999795,
        "file_path": "results/Huggy2/Huggy/Huggy-1999795.onnx",
        "reward": 3.8046873722757613,
        "creation_time": 1755771186.2425413,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999795.pt"
        ]
      },
      {
        "steps": 2000054,
        "file_path": "results/Huggy2/Huggy/Huggy-2000054.onnx",
        "reward": 3.876532661503759,
        "creation_time": 1755771186.3607073,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000054.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000054,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.876532661503759,
      "creation_time": 1755771186.3607073,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000054.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.8.0+cu128"
  }
}