{
"Huggy": {
"checkpoints": [
{
"steps": 199850,
"file_path": "results/Huggy/Huggy/Huggy-199850.onnx",
"reward": 3.2200691443065117,
"creation_time": 1672778209.8706906,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199850.pt"
]
},
{
"steps": 399916,
"file_path": "results/Huggy/Huggy/Huggy-399916.onnx",
"reward": 4.099930692712466,
"creation_time": 1672778423.0140724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399916.pt"
]
},
{
"steps": 599954,
"file_path": "results/Huggy/Huggy/Huggy-599954.onnx",
"reward": 3.6201946205563016,
"creation_time": 1672778655.1143582,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599954.pt"
]
},
{
"steps": 799940,
"file_path": "results/Huggy/Huggy/Huggy-799940.onnx",
"reward": 3.8198044206137243,
"creation_time": 1672778889.1111503,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799940.pt"
]
},
{
"steps": 999970,
"file_path": "results/Huggy/Huggy/Huggy-999970.onnx",
"reward": 3.811782580485447,
"creation_time": 1672779130.5814545,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999970.pt"
]
},
{
"steps": 1199961,
"file_path": "results/Huggy/Huggy/Huggy-1199961.onnx",
"reward": 3.644477378238331,
"creation_time": 1672779371.7560503,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199961.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
"reward": 4.872321389970326,
"creation_time": 1672779617.7095973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599922,
"file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
"reward": 3.799795187200819,
"creation_time": 1672779855.5382774,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599922.pt"
]
},
{
"steps": 1799985,
"file_path": "results/Huggy/Huggy/Huggy-1799985.onnx",
"reward": 3.757765293569493,
"creation_time": 1672780096.7195635,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799985.pt"
]
},
{
"steps": 1999930,
"file_path": "results/Huggy/Huggy/Huggy-1999930.onnx",
"reward": 3.6870718652551826,
"creation_time": 1672780337.340689,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999930.pt"
]
},
{
"steps": 2000033,
"file_path": "results/Huggy/Huggy/Huggy-2000033.onnx",
"reward": 3.715164106283615,
"creation_time": 1672780337.4633427,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000033.pt"
]
}
],
"final_checkpoint": {
"steps": 2000033,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.715164106283615,
"creation_time": 1672780337.4633427,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000033.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}