{
"Huggy": {
"checkpoints": [
{
"steps": 199937,
"file_path": "results/Huggy2/Huggy/Huggy-199937.onnx",
"reward": 3.251158939089094,
"creation_time": 1742575879.10572,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199937.pt"
]
},
{
"steps": 399961,
"file_path": "results/Huggy2/Huggy/Huggy-399961.onnx",
"reward": 3.798746352536338,
"creation_time": 1742576135.680641,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399961.pt"
]
},
{
"steps": 599929,
"file_path": "results/Huggy2/Huggy/Huggy-599929.onnx",
"reward": 3.523605125291007,
"creation_time": 1742576399.8907251,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599929.pt"
]
},
{
"steps": 799965,
"file_path": "results/Huggy2/Huggy/Huggy-799965.onnx",
"reward": 3.5643650787559586,
"creation_time": 1742576658.5548892,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799965.pt"
]
},
{
"steps": 999993,
"file_path": "results/Huggy2/Huggy/Huggy-999993.onnx",
"reward": 3.7134113748564976,
"creation_time": 1742576926.3902302,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999993.pt"
]
},
{
"steps": 1199884,
"file_path": "results/Huggy2/Huggy/Huggy-1199884.onnx",
"reward": 4.112757023278769,
"creation_time": 1742577195.4990406,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199884.pt"
]
},
{
"steps": 1399936,
"file_path": "results/Huggy2/Huggy/Huggy-1399936.onnx",
"reward": 4.339951801300049,
"creation_time": 1742577451.9641507,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399936.pt"
]
},
{
"steps": 1599928,
"file_path": "results/Huggy2/Huggy/Huggy-1599928.onnx",
"reward": 3.8529030896839083,
"creation_time": 1742577702.3951015,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599928.pt"
]
},
{
"steps": 1799951,
"file_path": "results/Huggy2/Huggy/Huggy-1799951.onnx",
"reward": 3.9707968889689838,
"creation_time": 1742577971.6213427,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799951.pt"
]
},
{
"steps": 1999911,
"file_path": "results/Huggy2/Huggy/Huggy-1999911.onnx",
"reward": 4.0386715072851915,
"creation_time": 1742578245.12828,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999911.pt"
]
},
{
"steps": 2000661,
"file_path": "results/Huggy2/Huggy/Huggy-2000661.onnx",
"reward": 3.891759116694612,
"creation_time": 1742578245.2948322,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000661.pt"
]
}
],
"final_checkpoint": {
"steps": 2000661,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.891759116694612,
"creation_time": 1742578245.2948322,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000661.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}