{
"Huggy": {
"checkpoints": [
{
"steps": 199754,
"file_path": "results/Huggy2/Huggy/Huggy-199754.onnx",
"reward": 3.421825357278188,
"creation_time": 1753184989.5080032,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199754.pt"
]
},
{
"steps": 399890,
"file_path": "results/Huggy2/Huggy/Huggy-399890.onnx",
"reward": 3.4561519643007697,
"creation_time": 1753185234.8691545,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399890.pt"
]
},
{
"steps": 599910,
"file_path": "results/Huggy2/Huggy/Huggy-599910.onnx",
"reward": 2.683552525937557,
"creation_time": 1753185483.2831283,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599910.pt"
]
},
{
"steps": 799732,
"file_path": "results/Huggy2/Huggy/Huggy-799732.onnx",
"reward": 3.6602265563553678,
"creation_time": 1753185737.7803552,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799732.pt"
]
},
{
"steps": 999878,
"file_path": "results/Huggy2/Huggy/Huggy-999878.onnx",
"reward": 3.9619891578736515,
"creation_time": 1753185999.028557,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999878.pt"
]
},
{
"steps": 1199919,
"file_path": "results/Huggy2/Huggy/Huggy-1199919.onnx",
"reward": 3.9417155461926616,
"creation_time": 1753186261.6334147,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199919.pt"
]
},
{
"steps": 1399936,
"file_path": "results/Huggy2/Huggy/Huggy-1399936.onnx",
"reward": 3.588900270632335,
"creation_time": 1753186524.2340262,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399936.pt"
]
},
{
"steps": 1599288,
"file_path": "results/Huggy2/Huggy/Huggy-1599288.onnx",
"reward": 3.9644572111693295,
"creation_time": 1753186787.999774,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599288.pt"
]
},
{
"steps": 1799945,
"file_path": "results/Huggy2/Huggy/Huggy-1799945.onnx",
"reward": 4.12756069501241,
"creation_time": 1753187050.750145,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799945.pt"
]
},
{
"steps": 1999923,
"file_path": "results/Huggy2/Huggy/Huggy-1999923.onnx",
"reward": 3.7407151284671967,
"creation_time": 1753187301.9398353,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999923.pt"
]
},
{
"steps": 2000673,
"file_path": "results/Huggy2/Huggy/Huggy-2000673.onnx",
"reward": 3.7140326934968124,
"creation_time": 1753187302.0894241,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000673.pt"
]
}
],
"final_checkpoint": {
"steps": 2000673,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7140326934968124,
"creation_time": 1753187302.0894241,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000673.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}