{
"Huggy": {
"checkpoints": [
{
"steps": 199840,
"file_path": "results/Huggy2/Huggy/Huggy-199840.onnx",
"reward": 3.3817091656142266,
"creation_time": 1722154818.809117,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199840.pt"
]
},
{
"steps": 399966,
"file_path": "results/Huggy2/Huggy/Huggy-399966.onnx",
"reward": 3.584411450794765,
"creation_time": 1722155055.1193159,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399966.pt"
]
},
{
"steps": 599984,
"file_path": "results/Huggy2/Huggy/Huggy-599984.onnx",
"reward": 3.5700843155384065,
"creation_time": 1722155301.1818137,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599984.pt"
]
},
{
"steps": 799941,
"file_path": "results/Huggy2/Huggy/Huggy-799941.onnx",
"reward": 4.02521657290524,
"creation_time": 1722155540.3061256,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799941.pt"
]
},
{
"steps": 999997,
"file_path": "results/Huggy2/Huggy/Huggy-999997.onnx",
"reward": 3.514405726282685,
"creation_time": 1722155782.9196503,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999997.pt"
]
},
{
"steps": 1199968,
"file_path": "results/Huggy2/Huggy/Huggy-1199968.onnx",
"reward": 3.891279519928826,
"creation_time": 1722156029.5690413,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199968.pt"
]
},
{
"steps": 1399903,
"file_path": "results/Huggy2/Huggy/Huggy-1399903.onnx",
"reward": 3.844520130779891,
"creation_time": 1722156271.6955104,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399903.pt"
]
},
{
"steps": 1599929,
"file_path": "results/Huggy2/Huggy/Huggy-1599929.onnx",
"reward": 3.857455266522361,
"creation_time": 1722156519.0240705,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599929.pt"
]
},
{
"steps": 1799982,
"file_path": "results/Huggy2/Huggy/Huggy-1799982.onnx",
"reward": 3.4891182259509437,
"creation_time": 1722156765.0841477,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799982.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy2/Huggy/Huggy-1999978.onnx",
"reward": 3.7314698855082193,
"creation_time": 1722157010.3793488,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000047,
"file_path": "results/Huggy2/Huggy/Huggy-2000047.onnx",
"reward": 3.7596291880453787,
"creation_time": 1722157010.4947476,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000047.pt"
]
}
],
"final_checkpoint": {
"steps": 2000047,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7596291880453787,
"creation_time": 1722157010.4947476,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000047.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}