ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199866,
                "file_path": "results/Huggy/Huggy/Huggy-199866.onnx",
                "reward": 3.4700606354212358,
                "creation_time": 1671116804.6120782,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199866.pt"
                ]
            },
            {
                "steps": 399782,
                "file_path": "results/Huggy/Huggy/Huggy-399782.onnx",
                "reward": 3.6228250457394506,
                "creation_time": 1671117021.2848947,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399782.pt"
                ]
            },
            {
                "steps": 599917,
                "file_path": "results/Huggy/Huggy/Huggy-599917.onnx",
                "reward": 4.655851785953228,
                "creation_time": 1671117239.1732895,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599917.pt"
                ]
            },
            {
                "steps": 799980,
                "file_path": "results/Huggy/Huggy/Huggy-799980.onnx",
                "reward": 3.865111602881016,
                "creation_time": 1671117455.3031418,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799980.pt"
                ]
            },
            {
                "steps": 999951,
                "file_path": "results/Huggy/Huggy/Huggy-999951.onnx",
                "reward": 3.784504069644175,
                "creation_time": 1671117675.869018,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999951.pt"
                ]
            },
            {
                "steps": 1199972,
                "file_path": "results/Huggy/Huggy/Huggy-1199972.onnx",
                "reward": 3.650700774598629,
                "creation_time": 1671117898.0727124,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199972.pt"
                ]
            },
            {
                "steps": 1399941,
                "file_path": "results/Huggy/Huggy/Huggy-1399941.onnx",
                "reward": 3.2604111433029175,
                "creation_time": 1671118119.5279872,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399941.pt"
                ]
            },
            {
                "steps": 1599965,
                "file_path": "results/Huggy/Huggy/Huggy-1599965.onnx",
                "reward": 3.9522728381449714,
                "creation_time": 1671118339.295488,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599965.pt"
                ]
            },
            {
                "steps": 1799997,
                "file_path": "results/Huggy/Huggy/Huggy-1799997.onnx",
                "reward": 3.973338121953218,
                "creation_time": 1671118561.0310614,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799997.pt"
                ]
            },
            {
                "steps": 1999906,
                "file_path": "results/Huggy/Huggy/Huggy-1999906.onnx",
                "reward": 3.986134910583496,
                "creation_time": 1671118780.2427032,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999906.pt"
                ]
            },
            {
                "steps": 2000018,
                "file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
                "reward": 4.0165044362427755,
                "creation_time": 1671118780.358831,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000018.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000018,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.0165044362427755,
            "creation_time": 1671118780.358831,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000018.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}