{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
},
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy2/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1763321210.372828,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-0.pt"
]
},
{
"steps": 199979,
"file_path": "results/Huggy2/Huggy/Huggy-199979.onnx",
"reward": 3.289300757827181,
"creation_time": 1763321503.557315,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199979.pt"
]
},
{
"steps": 399980,
"file_path": "results/Huggy2/Huggy/Huggy-399980.onnx",
"reward": 4.002624177727206,
"creation_time": 1763321741.975227,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399980.pt"
]
},
{
"steps": 599894,
"file_path": "results/Huggy2/Huggy/Huggy-599894.onnx",
"reward": 3.6047155332565306,
"creation_time": 1763321984.9240587,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599894.pt"
]
},
{
"steps": 799996,
"file_path": "results/Huggy2/Huggy/Huggy-799996.onnx",
"reward": 3.9474530017982095,
"creation_time": 1763322228.8325841,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799996.pt"
]
},
{
"steps": 999923,
"file_path": "results/Huggy2/Huggy/Huggy-999923.onnx",
"reward": 3.614661149631273,
"creation_time": 1763322473.830296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999923.pt"
]
},
{
"steps": 1199915,
"file_path": "results/Huggy2/Huggy/Huggy-1199915.onnx",
"reward": 3.7647400286889847,
"creation_time": 1763322719.233554,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199915.pt"
]
},
{
"steps": 1399983,
"file_path": "results/Huggy2/Huggy/Huggy-1399983.onnx",
"reward": 4.262194782495499,
"creation_time": 1763322964.6878026,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399983.pt"
]
},
{
"steps": 1599900,
"file_path": "results/Huggy2/Huggy/Huggy-1599900.onnx",
"reward": 3.8434531217992562,
"creation_time": 1763323208.4227498,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599900.pt"
]
},
{
"steps": 1799941,
"file_path": "results/Huggy2/Huggy/Huggy-1799941.onnx",
"reward": 3.937045271076807,
"creation_time": 1763323458.2876673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799941.pt"
]
},
{
"steps": 1999980,
"file_path": "results/Huggy2/Huggy/Huggy-1999980.onnx",
"reward": 3.9287983232669617,
"creation_time": 1763323707.3489892,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999980.pt"
]
},
{
"steps": 2000060,
"file_path": "results/Huggy2/Huggy/Huggy-2000060.onnx",
"reward": 3.94109953906801,
"creation_time": 1763323707.446735,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000060.pt"
]
}
],
"final_checkpoint": {
"steps": 2000060,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.94109953906801,
"creation_time": 1763323707.446735,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000060.pt"
]
}
}
}