{
"Huggy": {
"checkpoints": [
{
"steps": 199968,
"file_path": "results/Huggy2/Huggy/Huggy-199968.onnx",
"reward": 3.4577202623890293,
"creation_time": 1741975415.5959415,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199968.pt"
]
},
{
"steps": 399943,
"file_path": "results/Huggy2/Huggy/Huggy-399943.onnx",
"reward": 3.7754326828320823,
"creation_time": 1741975652.8080282,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399943.pt"
]
},
{
"steps": 599928,
"file_path": "results/Huggy2/Huggy/Huggy-599928.onnx",
"reward": 3.852039945977075,
"creation_time": 1741975902.0467753,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599928.pt"
]
},
{
"steps": 799765,
"file_path": "results/Huggy2/Huggy/Huggy-799765.onnx",
"reward": 3.930883824187804,
"creation_time": 1741976146.5613446,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799765.pt"
]
},
{
"steps": 999965,
"file_path": "results/Huggy2/Huggy/Huggy-999965.onnx",
"reward": 3.7532013978423744,
"creation_time": 1741976401.393219,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999965.pt"
]
},
{
"steps": 1199998,
"file_path": "results/Huggy2/Huggy/Huggy-1199998.onnx",
"reward": 3.9220545632498607,
"creation_time": 1741976656.4345953,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199998.pt"
]
},
{
"steps": 1399986,
"file_path": "results/Huggy2/Huggy/Huggy-1399986.onnx",
"reward": 3.7874186923850477,
"creation_time": 1741976915.1985233,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399986.pt"
]
},
{
"steps": 1599951,
"file_path": "results/Huggy2/Huggy/Huggy-1599951.onnx",
"reward": 3.8435614941323677,
"creation_time": 1741977168.8563683,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599951.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy2/Huggy/Huggy-1799968.onnx",
"reward": 4.00836627232401,
"creation_time": 1741977420.0568674,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999921,
"file_path": "results/Huggy2/Huggy/Huggy-1999921.onnx",
"reward": 3.893062912899515,
"creation_time": 1741977674.691308,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999921.pt"
]
},
{
"steps": 2000050,
"file_path": "results/Huggy2/Huggy/Huggy-2000050.onnx",
"reward": 3.9600943426291146,
"creation_time": 1741977674.805308,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000050.pt"
]
}
],
"final_checkpoint": {
"steps": 2000050,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.9600943426291146,
"creation_time": 1741977674.805308,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000050.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}