{
"Huggy": {
"checkpoints": [
{
"steps": 199983,
"file_path": "results/Huggy2/Huggy/Huggy-199983.onnx",
"reward": 3.3314630898737136,
"creation_time": 1755231010.302093,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199983.pt"
]
},
{
"steps": 399995,
"file_path": "results/Huggy2/Huggy/Huggy-399995.onnx",
"reward": 3.522081726955043,
"creation_time": 1755231256.9476607,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399995.pt"
]
},
{
"steps": 599440,
"file_path": "results/Huggy2/Huggy/Huggy-599440.onnx",
"reward": 4.709053589747502,
"creation_time": 1755231503.534968,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599440.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy2/Huggy/Huggy-799964.onnx",
"reward": 3.964709790504497,
"creation_time": 1755231754.8495493,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999891,
"file_path": "results/Huggy2/Huggy/Huggy-999891.onnx",
"reward": 3.815961341216014,
"creation_time": 1755232008.9653819,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999891.pt"
]
},
{
"steps": 1199971,
"file_path": "results/Huggy2/Huggy/Huggy-1199971.onnx",
"reward": 3.847493415698409,
"creation_time": 1755232260.609992,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199971.pt"
]
},
{
"steps": 1399976,
"file_path": "results/Huggy2/Huggy/Huggy-1399976.onnx",
"reward": 4.467867374420166,
"creation_time": 1755232510.8586996,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399976.pt"
]
},
{
"steps": 1599976,
"file_path": "results/Huggy2/Huggy/Huggy-1599976.onnx",
"reward": 3.608792144363209,
"creation_time": 1755232753.5088084,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599976.pt"
]
},
{
"steps": 1799420,
"file_path": "results/Huggy2/Huggy/Huggy-1799420.onnx",
"reward": 3.993763691297284,
"creation_time": 1755233002.6661232,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799420.pt"
]
},
{
"steps": 1999902,
"file_path": "results/Huggy2/Huggy/Huggy-1999902.onnx",
"reward": 3.388867452377226,
"creation_time": 1755233247.6028361,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999902.pt"
]
},
{
"steps": 2000099,
"file_path": "results/Huggy2/Huggy/Huggy-2000099.onnx",
"reward": 3.3559091798094816,
"creation_time": 1755233247.753807,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000099.pt"
]
}
],
"final_checkpoint": {
"steps": 2000099,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.3559091798094816,
"creation_time": 1755233247.753807,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000099.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}