{
"Huggy": {
"checkpoints": [
{
"steps": 199930,
"file_path": "results/Huggy/Huggy/Huggy-199930.onnx",
"reward": 3.393816965818405,
"creation_time": 1702535159.6535225,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199930.pt"
]
},
{
"steps": 399939,
"file_path": "results/Huggy/Huggy/Huggy-399939.onnx",
"reward": 3.6946988397953557,
"creation_time": 1702535429.7020733,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399939.pt"
]
},
{
"steps": 599930,
"file_path": "results/Huggy/Huggy/Huggy-599930.onnx",
"reward": 3.782136410474777,
"creation_time": 1702535696.7846048,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599930.pt"
]
},
{
"steps": 799940,
"file_path": "results/Huggy/Huggy/Huggy-799940.onnx",
"reward": 3.731499255879929,
"creation_time": 1702535960.424625,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799940.pt"
]
},
{
"steps": 999975,
"file_path": "results/Huggy/Huggy/Huggy-999975.onnx",
"reward": 3.7425290942192078,
"creation_time": 1702536236.3272307,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999975.pt"
]
},
{
"steps": 1199259,
"file_path": "results/Huggy/Huggy/Huggy-1199259.onnx",
"reward": 3.516835344143403,
"creation_time": 1702536510.371181,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199259.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy/Huggy/Huggy-1399992.onnx",
"reward": 3.5204616564546707,
"creation_time": 1702536776.4044063,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599352,
"file_path": "results/Huggy/Huggy/Huggy-1599352.onnx",
"reward": 3.561065361040448,
"creation_time": 1702537040.3225496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599352.pt"
]
},
{
"steps": 1799973,
"file_path": "results/Huggy/Huggy/Huggy-1799973.onnx",
"reward": 3.6922562401741743,
"creation_time": 1702537312.2560241,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799973.pt"
]
},
{
"steps": 1999923,
"file_path": "results/Huggy/Huggy/Huggy-1999923.onnx",
"reward": 3.862214452189368,
"creation_time": 1702537577.4890745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999923.pt"
]
},
{
"steps": 2000026,
"file_path": "results/Huggy/Huggy/Huggy-2000026.onnx",
"reward": 3.874413357306552,
"creation_time": 1702537577.6214464,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000026.pt"
]
}
],
"final_checkpoint": {
"steps": 2000026,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.874413357306552,
"creation_time": 1702537577.6214464,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000026.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}