{
"Huggy": {
"checkpoints": [
{
"steps": 16887,
"file_path": "results/Huggy2/Huggy/Huggy-16887.onnx",
"reward": 1.674097183536976,
"creation_time": 1755232850.2462895,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-16887.pt"
]
},
{
"steps": 199881,
"file_path": "results/Huggy2/Huggy/Huggy-199881.onnx",
"reward": 3.283965530602828,
"creation_time": 1755233350.0107722,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199881.pt"
]
},
{
"steps": 399905,
"file_path": "results/Huggy2/Huggy/Huggy-399905.onnx",
"reward": 3.3253818483188233,
"creation_time": 1755233807.1955569,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399905.pt"
]
},
{
"steps": 599990,
"file_path": "results/Huggy2/Huggy/Huggy-599990.onnx",
"reward": 3.834989064014875,
"creation_time": 1755234268.8484182,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599990.pt"
]
},
{
"steps": 799664,
"file_path": "results/Huggy2/Huggy/Huggy-799664.onnx",
"reward": 3.7926482653364224,
"creation_time": 1755234730.217622,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799664.pt"
]
},
{
"steps": 999909,
"file_path": "results/Huggy2/Huggy/Huggy-999909.onnx",
"reward": 3.850331907763201,
"creation_time": 1755235193.7419174,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999909.pt"
]
},
{
"steps": 1199970,
"file_path": "results/Huggy2/Huggy/Huggy-1199970.onnx",
"reward": 3.3229029936449868,
"creation_time": 1755235654.9292598,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199970.pt"
]
},
{
"steps": 1399934,
"file_path": "results/Huggy2/Huggy/Huggy-1399934.onnx",
"reward": 3.304411281238903,
"creation_time": 1755236114.517097,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399934.pt"
]
},
{
"steps": 1599442,
"file_path": "results/Huggy2/Huggy/Huggy-1599442.onnx",
"reward": 3.3619075524721214,
"creation_time": 1755236562.9389296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599442.pt"
]
},
{
"steps": 1799726,
"file_path": "results/Huggy2/Huggy/Huggy-1799726.onnx",
"reward": 3.64475109733519,
"creation_time": 1755237029.9191477,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799726.pt"
]
},
{
"steps": 1999922,
"file_path": "results/Huggy2/Huggy/Huggy-1999922.onnx",
"reward": 3.800425184016325,
"creation_time": 1755237493.5420673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999922.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
"reward": 3.8686070013046265,
"creation_time": 1755237493.660796,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8686070013046265,
"creation_time": 1755237493.660796,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}