{
"Huggy": {
"checkpoints": [
{
"steps": 199649,
"file_path": "results/Huggy2/Huggy/Huggy-199649.onnx",
"reward": 3.542105872184038,
"creation_time": 1703449412.6786005,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199649.pt"
]
},
{
"steps": 399981,
"file_path": "results/Huggy2/Huggy/Huggy-399981.onnx",
"reward": 3.5426363699576435,
"creation_time": 1703449671.234149,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399981.pt"
]
},
{
"steps": 599921,
"file_path": "results/Huggy2/Huggy/Huggy-599921.onnx",
"reward": 4.232880234718323,
"creation_time": 1703449931.664979,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599921.pt"
]
},
{
"steps": 799977,
"file_path": "results/Huggy2/Huggy/Huggy-799977.onnx",
"reward": 3.683307488759359,
"creation_time": 1703450187.4433143,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799977.pt"
]
},
{
"steps": 999870,
"file_path": "results/Huggy2/Huggy/Huggy-999870.onnx",
"reward": 3.964497803412762,
"creation_time": 1703450450.5038972,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999870.pt"
]
},
{
"steps": 1199969,
"file_path": "results/Huggy2/Huggy/Huggy-1199969.onnx",
"reward": 3.4172254034451077,
"creation_time": 1703450709.0821104,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199969.pt"
]
},
{
"steps": 1399892,
"file_path": "results/Huggy2/Huggy/Huggy-1399892.onnx",
"reward": 3.7524919222382938,
"creation_time": 1703450963.9049668,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399892.pt"
]
},
{
"steps": 1599548,
"file_path": "results/Huggy2/Huggy/Huggy-1599548.onnx",
"reward": 3.8213467877523035,
"creation_time": 1703451224.4538417,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599548.pt"
]
},
{
"steps": 1799962,
"file_path": "results/Huggy2/Huggy/Huggy-1799962.onnx",
"reward": 3.620374796482233,
"creation_time": 1703451489.2123673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799962.pt"
]
},
{
"steps": 1999698,
"file_path": "results/Huggy2/Huggy/Huggy-1999698.onnx",
"reward": 3.6011123886233882,
"creation_time": 1703451751.8738055,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999698.pt"
]
},
{
"steps": 2000448,
"file_path": "results/Huggy2/Huggy/Huggy-2000448.onnx",
"reward": 3.565009209810127,
"creation_time": 1703451752.0856242,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000448.pt"
]
}
],
"final_checkpoint": {
"steps": 2000448,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.565009209810127,
"creation_time": 1703451752.0856242,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000448.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}