{
"Huggy": {
"checkpoints": [
{
"steps": 199799,
"file_path": "results/Huggy2/Huggy/Huggy-199799.onnx",
"reward": 3.4747128935827725,
"creation_time": 1730346503.741889,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199799.pt"
]
},
{
"steps": 399934,
"file_path": "results/Huggy2/Huggy/Huggy-399934.onnx",
"reward": 3.853094838951763,
"creation_time": 1730346962.3138616,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399934.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy2/Huggy/Huggy-599966.onnx",
"reward": 3.824179838101069,
"creation_time": 1730347427.6564581,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799953,
"file_path": "results/Huggy2/Huggy/Huggy-799953.onnx",
"reward": 3.936794911338165,
"creation_time": 1730347878.3078175,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799953.pt"
]
},
{
"steps": 999964,
"file_path": "results/Huggy2/Huggy/Huggy-999964.onnx",
"reward": 3.6935533305345953,
"creation_time": 1730348347.5343328,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999964.pt"
]
},
{
"steps": 1199978,
"file_path": "results/Huggy2/Huggy/Huggy-1199978.onnx",
"reward": 3.736277758434255,
"creation_time": 1730348819.117167,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199978.pt"
]
},
{
"steps": 1399963,
"file_path": "results/Huggy2/Huggy/Huggy-1399963.onnx",
"reward": 3.6519450587885722,
"creation_time": 1730349288.027615,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399963.pt"
]
},
{
"steps": 1599484,
"file_path": "results/Huggy2/Huggy/Huggy-1599484.onnx",
"reward": 3.807477089095878,
"creation_time": 1730349740.2596622,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599484.pt"
]
},
{
"steps": 1799959,
"file_path": "results/Huggy2/Huggy/Huggy-1799959.onnx",
"reward": 3.681469304221017,
"creation_time": 1730350209.9306188,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799959.pt"
]
},
{
"steps": 1999942,
"file_path": "results/Huggy2/Huggy/Huggy-1999942.onnx",
"reward": 3.883095347112225,
"creation_time": 1730350669.7126195,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999942.pt"
]
},
{
"steps": 2000075,
"file_path": "results/Huggy2/Huggy/Huggy-2000075.onnx",
"reward": 3.8881864755872697,
"creation_time": 1730350669.897798,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000075.pt"
]
}
],
"final_checkpoint": {
"steps": 2000075,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8881864755872697,
"creation_time": 1730350669.897798,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000075.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.0+cu121"
}
}