{
"Huggy": {
"checkpoints": [
{
"steps": 199740,
"file_path": "results/Huggy2/Huggy/Huggy-199740.onnx",
"reward": 3.381565721705556,
"creation_time": 1737023853.9319968,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199740.pt"
]
},
{
"steps": 399905,
"file_path": "results/Huggy2/Huggy/Huggy-399905.onnx",
"reward": 4.125757737085223,
"creation_time": 1737024109.618467,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399905.pt"
]
},
{
"steps": 599883,
"file_path": "results/Huggy2/Huggy/Huggy-599883.onnx",
"reward": 3.5488193498717413,
"creation_time": 1737024372.2484884,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599883.pt"
]
},
{
"steps": 799957,
"file_path": "results/Huggy2/Huggy/Huggy-799957.onnx",
"reward": 3.6495100019951545,
"creation_time": 1737024629.4609272,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799957.pt"
]
},
{
"steps": 999956,
"file_path": "results/Huggy2/Huggy/Huggy-999956.onnx",
"reward": 3.5125004011651746,
"creation_time": 1737024886.7486749,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999956.pt"
]
},
{
"steps": 1199734,
"file_path": "results/Huggy2/Huggy/Huggy-1199734.onnx",
"reward": 4.0330920108529025,
"creation_time": 1737025145.7274797,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199734.pt"
]
},
{
"steps": 1399254,
"file_path": "results/Huggy2/Huggy/Huggy-1399254.onnx",
"reward": 3.383774738101398,
"creation_time": 1737025398.2327979,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399254.pt"
]
},
{
"steps": 1599905,
"file_path": "results/Huggy2/Huggy/Huggy-1599905.onnx",
"reward": 3.0975474773668776,
"creation_time": 1737025658.3817449,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599905.pt"
]
},
{
"steps": 1799885,
"file_path": "results/Huggy2/Huggy/Huggy-1799885.onnx",
"reward": 3.16814152496617,
"creation_time": 1737025917.626625,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799885.pt"
]
},
{
"steps": 1999932,
"file_path": "results/Huggy2/Huggy/Huggy-1999932.onnx",
"reward": 3.1477662037302565,
"creation_time": 1737026170.943948,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999932.pt"
]
},
{
"steps": 2000069,
"file_path": "results/Huggy2/Huggy/Huggy-2000069.onnx",
"reward": 3.1670480209092298,
"creation_time": 1737026171.0667946,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000069.pt"
]
}
],
"final_checkpoint": {
"steps": 2000069,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.1670480209092298,
"creation_time": 1737026171.0667946,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000069.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu124"
}
}