{
"Huggy": {
"checkpoints": [
{
"steps": 199970,
"file_path": "results/Huggy2/Huggy/Huggy-199970.onnx",
"reward": 3.437627968503468,
"creation_time": 1741769478.4989874,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199970.pt"
]
},
{
"steps": 399794,
"file_path": "results/Huggy2/Huggy/Huggy-399794.onnx",
"reward": 3.5884011932781763,
"creation_time": 1741769715.3500905,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399794.pt"
]
},
{
"steps": 599936,
"file_path": "results/Huggy2/Huggy/Huggy-599936.onnx",
"reward": 3.14332749265613,
"creation_time": 1741769954.685244,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599936.pt"
]
},
{
"steps": 799992,
"file_path": "results/Huggy2/Huggy/Huggy-799992.onnx",
"reward": 3.976896569635091,
"creation_time": 1741770197.812444,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799992.pt"
]
},
{
"steps": 999942,
"file_path": "results/Huggy2/Huggy/Huggy-999942.onnx",
"reward": 3.76480520516634,
"creation_time": 1741770442.7128642,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999942.pt"
]
},
{
"steps": 1199975,
"file_path": "results/Huggy2/Huggy/Huggy-1199975.onnx",
"reward": 3.844263920560479,
"creation_time": 1741770684.96855,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199975.pt"
]
},
{
"steps": 1399996,
"file_path": "results/Huggy2/Huggy/Huggy-1399996.onnx",
"reward": 3.7087503223315528,
"creation_time": 1741770922.9124575,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399996.pt"
]
},
{
"steps": 1599985,
"file_path": "results/Huggy2/Huggy/Huggy-1599985.onnx",
"reward": 4.037059426307678,
"creation_time": 1741771166.4438365,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599985.pt"
]
},
{
"steps": 1799948,
"file_path": "results/Huggy2/Huggy/Huggy-1799948.onnx",
"reward": 3.7144582238826125,
"creation_time": 1741771410.9704041,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799948.pt"
]
},
{
"steps": 1999933,
"file_path": "results/Huggy2/Huggy/Huggy-1999933.onnx",
"reward": 4.3254656961985996,
"creation_time": 1741771655.4602296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999933.pt"
]
},
{
"steps": 2000001,
"file_path": "results/Huggy2/Huggy/Huggy-2000001.onnx",
"reward": 4.340121666590373,
"creation_time": 1741771655.6094306,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000001.pt"
]
}
],
"final_checkpoint": {
"steps": 2000001,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.340121666590373,
"creation_time": 1741771655.6094306,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000001.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}