{
"Huggy": {
"checkpoints": [
{
"steps": 199906,
"file_path": "results/Malan's Huggy/Huggy/Huggy-199906.onnx",
"reward": 3.40451553202512,
"creation_time": 1681433519.0485196,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-199906.pt"
]
},
{
"steps": 399879,
"file_path": "results/Malan's Huggy/Huggy/Huggy-399879.onnx",
"reward": 3.7293476646596737,
"creation_time": 1681433752.738485,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-399879.pt"
]
},
{
"steps": 599899,
"file_path": "results/Malan's Huggy/Huggy/Huggy-599899.onnx",
"reward": 3.414127323362562,
"creation_time": 1681433984.9118588,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-599899.pt"
]
},
{
"steps": 799986,
"file_path": "results/Malan's Huggy/Huggy/Huggy-799986.onnx",
"reward": 3.690888990724788,
"creation_time": 1681434216.937959,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-799986.pt"
]
},
{
"steps": 999929,
"file_path": "results/Malan's Huggy/Huggy/Huggy-999929.onnx",
"reward": 3.6740736078541234,
"creation_time": 1681434458.9049973,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-999929.pt"
]
},
{
"steps": 1199902,
"file_path": "results/Malan's Huggy/Huggy/Huggy-1199902.onnx",
"reward": 3.5516290133649653,
"creation_time": 1681434695.7775948,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-1199902.pt"
]
},
{
"steps": 1399571,
"file_path": "results/Malan's Huggy/Huggy/Huggy-1399571.onnx",
"reward": 3.7713354114749005,
"creation_time": 1681434931.6109848,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-1399571.pt"
]
},
{
"steps": 1599995,
"file_path": "results/Malan's Huggy/Huggy/Huggy-1599995.onnx",
"reward": 4.00887358397037,
"creation_time": 1681435168.1379478,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-1599995.pt"
]
},
{
"steps": 1799386,
"file_path": "results/Malan's Huggy/Huggy/Huggy-1799386.onnx",
"reward": 3.700092361523555,
"creation_time": 1681435399.4978082,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-1799386.pt"
]
},
{
"steps": 1999981,
"file_path": "results/Malan's Huggy/Huggy/Huggy-1999981.onnx",
"reward": 4.615406262874603,
"creation_time": 1681435634.2345743,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-1999981.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Malan's Huggy/Huggy/Huggy-2000043.onnx",
"reward": 4.535808915183658,
"creation_time": 1681435634.365787,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Malan's Huggy/Huggy.onnx",
"reward": 4.535808915183658,
"creation_time": 1681435634.365787,
"auxillary_file_paths": [
"results/Malan's Huggy/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}