{}
{
"Huggy": {
"checkpoints": [
{
"steps": 199681,
"file_path": "results/Huggy/Huggy/Huggy-199681.onnx",
"reward": 3.2469403287943672,
"creation_time": 1706060455.2851846,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199681.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy/Huggy/Huggy-399985.onnx",
"reward": 3.393440158367157,
"creation_time": 1706060710.3714378,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
"reward": 3.3480025785309926,
"creation_time": 1706060965.8926249,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799812,
"file_path": "results/Huggy/Huggy/Huggy-799812.onnx",
"reward": 3.6697268942643326,
"creation_time": 1706061219.7033439,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799812.pt"
]
},
{
"steps": 999972,
"file_path": "results/Huggy/Huggy/Huggy-999972.onnx",
"reward": 3.889913766673117,
"creation_time": 1706061479.5781634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999972.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
"reward": 3.972685396671295,
"creation_time": 1706061737.4107478,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399980,
"file_path": "results/Huggy/Huggy/Huggy-1399980.onnx",
"reward": 3.840079189543743,
"creation_time": 1706061994.5717142,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399980.pt"
]
},
{
"steps": 1599988,
"file_path": "results/Huggy/Huggy/Huggy-1599988.onnx",
"reward": 3.7546646080091985,
"creation_time": 1706062257.2171783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599988.pt"
]
},
{
"steps": 1799972,
"file_path": "results/Huggy/Huggy/Huggy-1799972.onnx",
"reward": 3.8354428555654443,
"creation_time": 1706062518.393774,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799972.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
"reward": 4.197162237660638,
"creation_time": 1706062780.3078065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000082,
"file_path": "results/Huggy/Huggy/Huggy-2000082.onnx",
"reward": 4.2053663770357765,
"creation_time": 1706062780.444065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000082.pt"
]
}
],
"final_checkpoint": {
"steps": 2000082,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.2053663770357765,
"creation_time": 1706062780.444065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000082.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}