{
"Huggy": {
"checkpoints": [
{
"steps": 199883,
"file_path": "results/Huggy2/Huggy/Huggy-199883.onnx",
"reward": 3.804324197192346,
"creation_time": 1719639657.5462859,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199883.pt"
]
},
{
"steps": 399782,
"file_path": "results/Huggy2/Huggy/Huggy-399782.onnx",
"reward": 3.5985726555999444,
"creation_time": 1719639889.2406042,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399782.pt"
]
},
{
"steps": 599956,
"file_path": "results/Huggy2/Huggy/Huggy-599956.onnx",
"reward": 4.263704768248966,
"creation_time": 1719640125.906065,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599956.pt"
]
},
{
"steps": 799945,
"file_path": "results/Huggy2/Huggy/Huggy-799945.onnx",
"reward": 3.719537587750037,
"creation_time": 1719640357.7904882,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799945.pt"
]
},
{
"steps": 999860,
"file_path": "results/Huggy2/Huggy/Huggy-999860.onnx",
"reward": 3.942515518115117,
"creation_time": 1719640593.9449701,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999860.pt"
]
},
{
"steps": 1199990,
"file_path": "results/Huggy2/Huggy/Huggy-1199990.onnx",
"reward": 4.322752779349685,
"creation_time": 1719640827.5275512,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199990.pt"
]
},
{
"steps": 1399898,
"file_path": "results/Huggy2/Huggy/Huggy-1399898.onnx",
"reward": 3.9983110709251766,
"creation_time": 1719641062.5469892,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399898.pt"
]
},
{
"steps": 1599707,
"file_path": "results/Huggy2/Huggy/Huggy-1599707.onnx",
"reward": 3.7741296486828917,
"creation_time": 1719641298.9059503,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599707.pt"
]
},
{
"steps": 1799981,
"file_path": "results/Huggy2/Huggy/Huggy-1799981.onnx",
"reward": 3.7567760883545387,
"creation_time": 1719641537.2118068,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799981.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
"reward": 3.7852053493261337,
"creation_time": 1719641774.1271625,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000018,
"file_path": "results/Huggy2/Huggy/Huggy-2000018.onnx",
"reward": 3.73089574322556,
"creation_time": 1719641774.2363484,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000018.pt"
]
}
],
"final_checkpoint": {
"steps": 2000018,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.73089574322556,
"creation_time": 1719641774.2363484,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000018.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}