{
"Huggy": {
"checkpoints": [
{
"steps": 199996,
"file_path": "results/Huggy2/Huggy/Huggy-199996.onnx",
"reward": 3.54440800056738,
"creation_time": 1729105208.4048028,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199996.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy2/Huggy/Huggy-399985.onnx",
"reward": 3.494238684282583,
"creation_time": 1729105455.7966013,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599988,
"file_path": "results/Huggy2/Huggy/Huggy-599988.onnx",
"reward": 3.215039903918902,
"creation_time": 1729105702.0353928,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599988.pt"
]
},
{
"steps": 799938,
"file_path": "results/Huggy2/Huggy/Huggy-799938.onnx",
"reward": 3.7998165722134747,
"creation_time": 1729105949.1646054,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799938.pt"
]
},
{
"steps": 999350,
"file_path": "results/Huggy2/Huggy/Huggy-999350.onnx",
"reward": 3.6324035312057634,
"creation_time": 1729106197.2263112,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999350.pt"
]
},
{
"steps": 1199744,
"file_path": "results/Huggy2/Huggy/Huggy-1199744.onnx",
"reward": 3.752461771602216,
"creation_time": 1729106446.8433845,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199744.pt"
]
},
{
"steps": 1399984,
"file_path": "results/Huggy2/Huggy/Huggy-1399984.onnx",
"reward": 3.566999695930384,
"creation_time": 1729106690.5558052,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399984.pt"
]
},
{
"steps": 1599994,
"file_path": "results/Huggy2/Huggy/Huggy-1599994.onnx",
"reward": 3.7723156335079566,
"creation_time": 1729106937.3383667,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599994.pt"
]
},
{
"steps": 1799959,
"file_path": "results/Huggy2/Huggy/Huggy-1799959.onnx",
"reward": 3.88447270286617,
"creation_time": 1729107185.545221,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799959.pt"
]
},
{
"steps": 1999989,
"file_path": "results/Huggy2/Huggy/Huggy-1999989.onnx",
"reward": 3.344224490225315,
"creation_time": 1729107430.8720896,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999989.pt"
]
},
{
"steps": 2000032,
"file_path": "results/Huggy2/Huggy/Huggy-2000032.onnx",
"reward": 3.154041243924035,
"creation_time": 1729107431.003551,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000032.pt"
]
}
],
"final_checkpoint": {
"steps": 2000032,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.154041243924035,
"creation_time": 1729107431.003551,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000032.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.4.1+cu121"
}
}