{
"Huggy": {
"checkpoints": [
{
"steps": 199948,
"file_path": "results/Huggy2/Huggy/Huggy-199948.onnx",
"reward": 3.667428812613854,
"creation_time": 1744634388.395132,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199948.pt"
]
},
{
"steps": 399939,
"file_path": "results/Huggy2/Huggy/Huggy-399939.onnx",
"reward": 3.862854402860006,
"creation_time": 1744634642.7770848,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399939.pt"
]
},
{
"steps": 599955,
"file_path": "results/Huggy2/Huggy/Huggy-599955.onnx",
"reward": 3.95019459363186,
"creation_time": 1744634902.1803772,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599955.pt"
]
},
{
"steps": 799948,
"file_path": "results/Huggy2/Huggy/Huggy-799948.onnx",
"reward": 3.7426924572761404,
"creation_time": 1744635155.667655,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799948.pt"
]
},
{
"steps": 999920,
"file_path": "results/Huggy2/Huggy/Huggy-999920.onnx",
"reward": 3.823903396537032,
"creation_time": 1744635414.3851004,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999920.pt"
]
},
{
"steps": 1199992,
"file_path": "results/Huggy2/Huggy/Huggy-1199992.onnx",
"reward": 3.8263861658752605,
"creation_time": 1744635671.906583,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199992.pt"
]
},
{
"steps": 1399979,
"file_path": "results/Huggy2/Huggy/Huggy-1399979.onnx",
"reward": 4.021209680515787,
"creation_time": 1744635929.0227234,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399979.pt"
]
},
{
"steps": 1599945,
"file_path": "results/Huggy2/Huggy/Huggy-1599945.onnx",
"reward": 3.91076022028923,
"creation_time": 1744636182.7734847,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599945.pt"
]
},
{
"steps": 1799920,
"file_path": "results/Huggy2/Huggy/Huggy-1799920.onnx",
"reward": 4.006640928321414,
"creation_time": 1744636435.4502227,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799920.pt"
]
},
{
"steps": 1999951,
"file_path": "results/Huggy2/Huggy/Huggy-1999951.onnx",
"reward": 3.9405787492143936,
"creation_time": 1744636687.9584565,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999951.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
"reward": 3.9487845199448723,
"creation_time": 1744636688.1452465,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.9487845199448723,
"creation_time": 1744636688.1452465,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}