{
"Huggy": {
"checkpoints": [
{
"steps": 199967,
"file_path": "results/Huggy/Huggy/Huggy-199967.onnx",
"reward": 3.3792401915011197,
"creation_time": 1698424321.093684,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399888,
"file_path": "results/Huggy/Huggy/Huggy-399888.onnx",
"reward": 3.712601247855595,
"creation_time": 1698424561.2863228,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399888.pt"
]
},
{
"steps": 599878,
"file_path": "results/Huggy/Huggy/Huggy-599878.onnx",
"reward": 3.9649327993392944,
"creation_time": 1698424802.0092356,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599878.pt"
]
},
{
"steps": 799945,
"file_path": "results/Huggy/Huggy/Huggy-799945.onnx",
"reward": 3.9049970909305243,
"creation_time": 1698425038.5259283,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799945.pt"
]
},
{
"steps": 999994,
"file_path": "results/Huggy/Huggy/Huggy-999994.onnx",
"reward": 3.7316968120061436,
"creation_time": 1698425280.3476806,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999994.pt"
]
},
{
"steps": 1199860,
"file_path": "results/Huggy/Huggy/Huggy-1199860.onnx",
"reward": 3.800026563175938,
"creation_time": 1698425524.43121,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199860.pt"
]
},
{
"steps": 1399996,
"file_path": "results/Huggy/Huggy/Huggy-1399996.onnx",
"reward": 3.7529727435112,
"creation_time": 1698425760.5124393,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399996.pt"
]
},
{
"steps": 1599973,
"file_path": "results/Huggy/Huggy/Huggy-1599973.onnx",
"reward": 4.021418507150609,
"creation_time": 1698426002.5814364,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599973.pt"
]
},
{
"steps": 1799987,
"file_path": "results/Huggy/Huggy/Huggy-1799987.onnx",
"reward": 3.7987974443654906,
"creation_time": 1698426245.3332903,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799987.pt"
]
},
{
"steps": 1999981,
"file_path": "results/Huggy/Huggy/Huggy-1999981.onnx",
"reward": 4.246581521034241,
"creation_time": 1698426486.7331047,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999981.pt"
]
},
{
"steps": 2000039,
"file_path": "results/Huggy/Huggy/Huggy-2000039.onnx",
"reward": 4.172634807916788,
"creation_time": 1698426486.8356946,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000039.pt"
]
}
],
"final_checkpoint": {
"steps": 2000039,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.172634807916788,
"creation_time": 1698426486.8356946,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000039.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}