{
"Huggy": {
"checkpoints": [
{
"steps": 199833,
"file_path": "results/Huggy2/Huggy/Huggy-199833.onnx",
"reward": 3.0468871160860984,
"creation_time": 1745442270.2390132,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199833.pt"
]
},
{
"steps": 399984,
"file_path": "results/Huggy2/Huggy/Huggy-399984.onnx",
"reward": 3.627832794189453,
"creation_time": 1745442512.7593472,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399984.pt"
]
},
{
"steps": 599916,
"file_path": "results/Huggy2/Huggy/Huggy-599916.onnx",
"reward": 3.4526046091510403,
"creation_time": 1745442761.725538,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599916.pt"
]
},
{
"steps": 799903,
"file_path": "results/Huggy2/Huggy/Huggy-799903.onnx",
"reward": 3.844984374453674,
"creation_time": 1745443008.0178971,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799903.pt"
]
},
{
"steps": 999895,
"file_path": "results/Huggy2/Huggy/Huggy-999895.onnx",
"reward": 3.9477661430456075,
"creation_time": 1745443254.5084658,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999895.pt"
]
},
{
"steps": 1199935,
"file_path": "results/Huggy2/Huggy/Huggy-1199935.onnx",
"reward": 3.980570886024209,
"creation_time": 1745443500.7553515,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199935.pt"
]
},
{
"steps": 1399938,
"file_path": "results/Huggy2/Huggy/Huggy-1399938.onnx",
"reward": 3.5034031867980957,
"creation_time": 1745443745.6425838,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399938.pt"
]
},
{
"steps": 1599993,
"file_path": "results/Huggy2/Huggy/Huggy-1599993.onnx",
"reward": 3.8412255942821503,
"creation_time": 1745443980.4474702,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599993.pt"
]
},
{
"steps": 1799955,
"file_path": "results/Huggy2/Huggy/Huggy-1799955.onnx",
"reward": 3.907829959602917,
"creation_time": 1745444218.2757692,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799955.pt"
]
},
{
"steps": 1999923,
"file_path": "results/Huggy2/Huggy/Huggy-1999923.onnx",
"reward": 3.804103455846272,
"creation_time": 1745444460.0903006,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999923.pt"
]
},
{
"steps": 2000007,
"file_path": "results/Huggy2/Huggy/Huggy-2000007.onnx",
"reward": 3.83751923404634,
"creation_time": 1745444460.1974144,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000007.pt"
]
}
],
"final_checkpoint": {
"steps": 2000007,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.83751923404634,
"creation_time": 1745444460.1974144,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000007.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}