{
"Huggy": {
"checkpoints": [
{
"steps": 199737,
"file_path": "results/Huggy2/Huggy/Huggy-199737.onnx",
"reward": 3.5238566212966793,
"creation_time": 1718606553.3067162,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199737.pt"
]
},
{
"steps": 399877,
"file_path": "results/Huggy2/Huggy/Huggy-399877.onnx",
"reward": 4.150297541171312,
"creation_time": 1718606703.1642478,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399877.pt"
]
},
{
"steps": 599994,
"file_path": "results/Huggy2/Huggy/Huggy-599994.onnx",
"reward": 3.00426127910614,
"creation_time": 1718606853.7469053,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599994.pt"
]
},
{
"steps": 799942,
"file_path": "results/Huggy2/Huggy/Huggy-799942.onnx",
"reward": 4.043531291917259,
"creation_time": 1718607005.5142913,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799942.pt"
]
},
{
"steps": 999915,
"file_path": "results/Huggy2/Huggy/Huggy-999915.onnx",
"reward": 4.058584794962316,
"creation_time": 1718607158.3195856,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999915.pt"
]
},
{
"steps": 1199983,
"file_path": "results/Huggy2/Huggy/Huggy-1199983.onnx",
"reward": 3.9641119678815206,
"creation_time": 1718607312.700699,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199983.pt"
]
},
{
"steps": 1399978,
"file_path": "results/Huggy2/Huggy/Huggy-1399978.onnx",
"reward": 4.336638450622559,
"creation_time": 1718607467.4390142,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399978.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy2/Huggy/Huggy-1599982.onnx",
"reward": 3.8755927140062507,
"creation_time": 1718607619.1443136,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799638,
"file_path": "results/Huggy2/Huggy/Huggy-1799638.onnx",
"reward": 3.945387254228125,
"creation_time": 1718607774.3595214,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799638.pt"
]
},
{
"steps": 1999964,
"file_path": "results/Huggy2/Huggy/Huggy-1999964.onnx",
"reward": 3.816628818852561,
"creation_time": 1718607928.4089236,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999964.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
"reward": 3.8109814969586653,
"creation_time": 1718607928.5617542,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8109814969586653,
"creation_time": 1718607928.5617542,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}