{
"Huggy": {
"checkpoints": [
{
"steps": 199802,
"file_path": "results/Huggy2/Huggy/Huggy-199802.onnx",
"reward": 3.4383788733255294,
"creation_time": 1748427156.0372994,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199802.pt"
]
},
{
"steps": 399997,
"file_path": "results/Huggy2/Huggy/Huggy-399997.onnx",
"reward": 3.4481758314020494,
"creation_time": 1748427398.5607111,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399997.pt"
]
},
{
"steps": 599942,
"file_path": "results/Huggy2/Huggy/Huggy-599942.onnx",
"reward": 4.338088383277257,
"creation_time": 1748427643.4220765,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599942.pt"
]
},
{
"steps": 799956,
"file_path": "results/Huggy2/Huggy/Huggy-799956.onnx",
"reward": 3.7405120477956886,
"creation_time": 1748427887.3977168,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799956.pt"
]
},
{
"steps": 999944,
"file_path": "results/Huggy2/Huggy/Huggy-999944.onnx",
"reward": 4.065437452151225,
"creation_time": 1748428137.2660167,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999944.pt"
]
},
{
"steps": 1199530,
"file_path": "results/Huggy2/Huggy/Huggy-1199530.onnx",
"reward": 3.6435679511020056,
"creation_time": 1748428387.4046354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199530.pt"
]
},
{
"steps": 1399322,
"file_path": "results/Huggy2/Huggy/Huggy-1399322.onnx",
"reward": 3.5517419229255864,
"creation_time": 1748428633.882331,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399322.pt"
]
},
{
"steps": 1599455,
"file_path": "results/Huggy2/Huggy/Huggy-1599455.onnx",
"reward": 3.558315017100038,
"creation_time": 1748428883.159094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599455.pt"
]
},
{
"steps": 1799910,
"file_path": "results/Huggy2/Huggy/Huggy-1799910.onnx",
"reward": 3.120322427234134,
"creation_time": 1748429128.8335555,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799910.pt"
]
},
{
"steps": 1999970,
"file_path": "results/Huggy2/Huggy/Huggy-1999970.onnx",
"reward": 3.688946648382805,
"creation_time": 1748429371.3053522,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999970.pt"
]
},
{
"steps": 2000025,
"file_path": "results/Huggy2/Huggy/Huggy-2000025.onnx",
"reward": 3.6891500816478597,
"creation_time": 1748429371.4210136,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000025.pt"
]
}
],
"final_checkpoint": {
"steps": 2000025,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.6891500816478597,
"creation_time": 1748429371.4210136,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000025.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}