{
"Huggy": {
"checkpoints": [
{
"steps": 199933,
"file_path": "results/Huggy2/Huggy/Huggy-199933.onnx",
"reward": 3.695315611548722,
"creation_time": 1749451036.4871655,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199933.pt"
]
},
{
"steps": 399938,
"file_path": "results/Huggy2/Huggy/Huggy-399938.onnx",
"reward": 3.608204595859234,
"creation_time": 1749451271.8097548,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399938.pt"
]
},
{
"steps": 599920,
"file_path": "results/Huggy2/Huggy/Huggy-599920.onnx",
"reward": 3.835383196671804,
"creation_time": 1749451506.168719,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599920.pt"
]
},
{
"steps": 799966,
"file_path": "results/Huggy2/Huggy/Huggy-799966.onnx",
"reward": 3.6328866561024173,
"creation_time": 1749451737.812619,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799966.pt"
]
},
{
"steps": 999748,
"file_path": "results/Huggy2/Huggy/Huggy-999748.onnx",
"reward": 3.6473524334101843,
"creation_time": 1749451973.645423,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999748.pt"
]
},
{
"steps": 1199265,
"file_path": "results/Huggy2/Huggy/Huggy-1199265.onnx",
"reward": 4.380673688191634,
"creation_time": 1749452210.924974,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199265.pt"
]
},
{
"steps": 1399932,
"file_path": "results/Huggy2/Huggy/Huggy-1399932.onnx",
"reward": 3.558903223589847,
"creation_time": 1749452447.1938033,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399932.pt"
]
},
{
"steps": 1599985,
"file_path": "results/Huggy2/Huggy/Huggy-1599985.onnx",
"reward": 3.4744282432322233,
"creation_time": 1749452685.239142,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599985.pt"
]
},
{
"steps": 1799997,
"file_path": "results/Huggy2/Huggy/Huggy-1799997.onnx",
"reward": 3.7421990650803294,
"creation_time": 1749452918.701592,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799997.pt"
]
},
{
"steps": 1999614,
"file_path": "results/Huggy2/Huggy/Huggy-1999614.onnx",
"reward": 2.7703558206558228,
"creation_time": 1749453154.1746519,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999614.pt"
]
},
{
"steps": 2000364,
"file_path": "results/Huggy2/Huggy/Huggy-2000364.onnx",
"reward": 1.966622999736241,
"creation_time": 1749453154.312738,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000364.pt"
]
}
],
"final_checkpoint": {
"steps": 2000364,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 1.966622999736241,
"creation_time": 1749453154.312738,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000364.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}