{
"Huggy": {
"checkpoints": [
{
"steps": 149992,
"file_path": "results/Huggy2/Huggy/Huggy-149992.onnx",
"reward": 2.7801108016417575,
"creation_time": 1735329194.8146782,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-149992.pt"
]
},
{
"steps": 299407,
"file_path": "results/Huggy2/Huggy/Huggy-299407.onnx",
"reward": 3.7612309424082437,
"creation_time": 1735329367.6708431,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-299407.pt"
]
},
{
"steps": 449882,
"file_path": "results/Huggy2/Huggy/Huggy-449882.onnx",
"reward": 3.617741159090759,
"creation_time": 1735329540.0013857,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-449882.pt"
]
},
{
"steps": 599943,
"file_path": "results/Huggy2/Huggy/Huggy-599943.onnx",
"reward": 3.554134468237559,
"creation_time": 1735329719.6699824,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599943.pt"
]
},
{
"steps": 749887,
"file_path": "results/Huggy2/Huggy/Huggy-749887.onnx",
"reward": 3.655369543167482,
"creation_time": 1735329895.9761965,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-749887.pt"
]
},
{
"steps": 899992,
"file_path": "results/Huggy2/Huggy/Huggy-899992.onnx",
"reward": 3.7082690993944802,
"creation_time": 1735330073.631295,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-899992.pt"
]
},
{
"steps": 1049956,
"file_path": "results/Huggy2/Huggy/Huggy-1049956.onnx",
"reward": 3.8092876843043735,
"creation_time": 1735330254.4302099,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1049956.pt"
]
},
{
"steps": 1199953,
"file_path": "results/Huggy2/Huggy/Huggy-1199953.onnx",
"reward": 3.948659881165153,
"creation_time": 1735330431.7585,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199953.pt"
]
},
{
"steps": 1349945,
"file_path": "results/Huggy2/Huggy/Huggy-1349945.onnx",
"reward": 3.8198140174860047,
"creation_time": 1735330612.8084617,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1349945.pt"
]
},
{
"steps": 1499908,
"file_path": "results/Huggy2/Huggy/Huggy-1499908.onnx",
"reward": 3.960483792384109,
"creation_time": 1735330793.2296984,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1499908.pt"
]
},
{
"steps": 1649988,
"file_path": "results/Huggy2/Huggy/Huggy-1649988.onnx",
"reward": 3.5813780156048862,
"creation_time": 1735330974.4646316,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1649988.pt"
]
},
{
"steps": 1799983,
"file_path": "results/Huggy2/Huggy/Huggy-1799983.onnx",
"reward": 4.087201074759165,
"creation_time": 1735331155.3012183,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799983.pt"
]
},
{
"steps": 1949944,
"file_path": "results/Huggy2/Huggy/Huggy-1949944.onnx",
"reward": 3.7585001280721353,
"creation_time": 1735331334.982591,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1949944.pt"
]
},
{
"steps": 2000182,
"file_path": "results/Huggy2/Huggy/Huggy-2000182.onnx",
"reward": 3.6054768429862127,
"creation_time": 1735331395.6023314,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000182.pt"
]
}
],
"final_checkpoint": {
"steps": 2000182,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.6054768429862127,
"creation_time": 1735331395.6023314,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000182.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}