{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
},
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1720644883.3757813,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-0.pt"
]
},
{
"steps": 199862,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-199862.onnx",
"reward": 3.399802430109544,
"creation_time": 1720645185.203252,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-199862.pt"
]
},
{
"steps": 399991,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-399991.onnx",
"reward": 3.573858734490215,
"creation_time": 1720645438.6975346,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-399991.pt"
]
},
{
"steps": 599947,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-599947.onnx",
"reward": 3.9377924109760083,
"creation_time": 1720645693.9051335,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-599947.pt"
]
},
{
"steps": 799393,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-799393.onnx",
"reward": 3.60022799265867,
"creation_time": 1720645949.299077,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-799393.pt"
]
},
{
"steps": 999862,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-999862.onnx",
"reward": 3.7129399349204206,
"creation_time": 1720646210.6209202,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-999862.pt"
]
},
{
"steps": 1199919,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-1199919.onnx",
"reward": 3.768470506278836,
"creation_time": 1720646467.5680072,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-1199919.pt"
]
},
{
"steps": 1399979,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-1399979.onnx",
"reward": 3.7936988852802336,
"creation_time": 1720646721.1082778,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-1399979.pt"
]
},
{
"steps": 1599995,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-1599995.onnx",
"reward": 3.5608450220181393,
"creation_time": 1720646978.6384296,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-1599995.pt"
]
},
{
"steps": 1799907,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-1799907.onnx",
"reward": 3.3940154861430734,
"creation_time": 1720647236.2384531,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-1799907.pt"
]
},
{
"steps": 1999933,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-1999933.onnx",
"reward": null,
"creation_time": 1720647492.6582289,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-1999933.pt"
]
},
{
"steps": 2000003,
"file_path": "results/Fetch_Huggy/Huggy/Huggy-2000003.onnx",
"reward": 4.372549057006836,
"creation_time": 1720647492.7833061,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-2000003.pt"
]
}
],
"final_checkpoint": {
"steps": 2000003,
"file_path": "results/Fetch_Huggy/Huggy.onnx",
"reward": 4.372549057006836,
"creation_time": 1720647492.7833061,
"auxillary_file_paths": [
"results/Fetch_Huggy/Huggy/Huggy-2000003.pt"
]
}
}
}