{
"Huggy": {
"checkpoints": [
{
"steps": 199836,
"file_path": "results/Huggy2/Huggy/Huggy-199836.onnx",
"reward": 3.5181051262638023,
"creation_time": 1719053173.2293403,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199836.pt"
]
},
{
"steps": 399259,
"file_path": "results/Huggy2/Huggy/Huggy-399259.onnx",
"reward": 3.403180312779215,
"creation_time": 1719053425.4003916,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399259.pt"
]
},
{
"steps": 599978,
"file_path": "results/Huggy2/Huggy/Huggy-599978.onnx",
"reward": 4.311974763870239,
"creation_time": 1719053687.2667894,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599978.pt"
]
},
{
"steps": 799988,
"file_path": "results/Huggy2/Huggy/Huggy-799988.onnx",
"reward": 3.764456101628237,
"creation_time": 1719053949.8907182,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799988.pt"
]
},
{
"steps": 999806,
"file_path": "results/Huggy2/Huggy/Huggy-999806.onnx",
"reward": 3.5714525757012545,
"creation_time": 1719054218.0107303,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999806.pt"
]
},
{
"steps": 1199922,
"file_path": "results/Huggy2/Huggy/Huggy-1199922.onnx",
"reward": 3.815959807704477,
"creation_time": 1719054481.6120307,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199922.pt"
]
},
{
"steps": 1399964,
"file_path": "results/Huggy2/Huggy/Huggy-1399964.onnx",
"reward": 3.7761435454541985,
"creation_time": 1719054737.8012664,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399964.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy2/Huggy/Huggy-1599992.onnx",
"reward": 3.609573473236454,
"creation_time": 1719054994.9362807,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799985,
"file_path": "results/Huggy2/Huggy/Huggy-1799985.onnx",
"reward": 3.5250186125437417,
"creation_time": 1719055255.193952,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799985.pt"
]
},
{
"steps": 1999655,
"file_path": "results/Huggy2/Huggy/Huggy-1999655.onnx",
"reward": 4.857850869496663,
"creation_time": 1719055524.316731,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999655.pt"
]
},
{
"steps": 2000405,
"file_path": "results/Huggy2/Huggy/Huggy-2000405.onnx",
"reward": 3.7042878695896695,
"creation_time": 1719055524.4676368,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000405.pt"
]
}
],
"final_checkpoint": {
"steps": 2000405,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7042878695896695,
"creation_time": 1719055524.4676368,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000405.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}