{
"Pyramids": {
"checkpoints": [
{
"steps": 499995,
"file_path": "results/Pyramids Training/Pyramids/Pyramids-499995.onnx",
"reward": 1.435999982059002,
"creation_time": 1699656511.3864174,
"auxillary_file_paths": [
"results/Pyramids Training/Pyramids/Pyramids-499995.pt"
]
},
{
"steps": 999991,
"file_path": "results/Pyramids Training/Pyramids/Pyramids-999991.onnx",
"reward": 1.9019999504089355,
"creation_time": 1699657768.272183,
"auxillary_file_paths": [
"results/Pyramids Training/Pyramids/Pyramids-999991.pt"
]
},
{
"steps": 1000247,
"file_path": "results/Pyramids Training/Pyramids/Pyramids-1000247.onnx",
"reward": 1.9019999504089355,
"creation_time": 1699657768.4198377,
"auxillary_file_paths": [
"results/Pyramids Training/Pyramids/Pyramids-1000247.pt"
]
}
],
"final_checkpoint": {
"steps": 1000247,
"file_path": "results/Pyramids Training/Pyramids.onnx",
"reward": 1.9019999504089355,
"creation_time": 1699657768.4198377,
"auxillary_file_paths": [
"results/Pyramids Training/Pyramids/Pyramids-1000247.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu121"
}
}