{
"Huggy": {
"checkpoints": [
{
"steps": 199571,
"file_path": "results/Huggy/Huggy/Huggy-199571.onnx",
"reward": 3.302404339114825,
"creation_time": 1683979446.679654,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199571.pt"
]
},
{
"steps": 399969,
"file_path": "results/Huggy/Huggy/Huggy-399969.onnx",
"reward": 3.3663784851462153,
"creation_time": 1683979674.9403968,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399969.pt"
]
},
{
"steps": 599971,
"file_path": "results/Huggy/Huggy/Huggy-599971.onnx",
"reward": 3.624334752559662,
"creation_time": 1683979905.1537566,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599971.pt"
]
},
{
"steps": 799966,
"file_path": "results/Huggy/Huggy/Huggy-799966.onnx",
"reward": 3.684055269164527,
"creation_time": 1683980136.5126808,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799966.pt"
]
},
{
"steps": 999996,
"file_path": "results/Huggy/Huggy/Huggy-999996.onnx",
"reward": 3.502801263600253,
"creation_time": 1683980380.0955896,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999996.pt"
]
},
{
"steps": 1199937,
"file_path": "results/Huggy/Huggy/Huggy-1199937.onnx",
"reward": 3.0091171264648438,
"creation_time": 1683980627.3796558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199937.pt"
]
},
{
"steps": 1399744,
"file_path": "results/Huggy/Huggy/Huggy-1399744.onnx",
"reward": 3.8523632052254544,
"creation_time": 1683980868.0320663,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399744.pt"
]
},
{
"steps": 1599991,
"file_path": "results/Huggy/Huggy/Huggy-1599991.onnx",
"reward": 3.4991797422990203,
"creation_time": 1683981106.522424,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599991.pt"
]
},
{
"steps": 1799901,
"file_path": "results/Huggy/Huggy/Huggy-1799901.onnx",
"reward": 3.5933328780575073,
"creation_time": 1683981346.4992802,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799901.pt"
]
},
{
"steps": 1999938,
"file_path": "results/Huggy/Huggy/Huggy-1999938.onnx",
"reward": 0.493790070215861,
"creation_time": 1683981585.8878467,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999938.pt"
]
},
{
"steps": 2000064,
"file_path": "results/Huggy/Huggy/Huggy-2000064.onnx",
"reward": 1.8757670521736145,
"creation_time": 1683981586.0235317,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000064.pt"
]
}
],
"final_checkpoint": {
"steps": 2000064,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 1.8757670521736145,
"creation_time": 1683981586.0235317,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000064.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}