{
"Huggy": {
"checkpoints": [
{
"steps": 199946,
"file_path": "results/Huggy/Huggy/Huggy-199946.onnx",
"reward": 3.5705900387977487,
"creation_time": 1674829944.3675854,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199946.pt"
]
},
{
"steps": 399843,
"file_path": "results/Huggy/Huggy/Huggy-399843.onnx",
"reward": 3.922442269996858,
"creation_time": 1674830158.619468,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399843.pt"
]
},
{
"steps": 599586,
"file_path": "results/Huggy/Huggy/Huggy-599586.onnx",
"reward": 3.9750537621347526,
"creation_time": 1674830378.716065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599586.pt"
]
},
{
"steps": 799944,
"file_path": "results/Huggy/Huggy/Huggy-799944.onnx",
"reward": 3.874042366232191,
"creation_time": 1674830591.314684,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799944.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
"reward": 3.706449623749806,
"creation_time": 1674830809.525272,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199996,
"file_path": "results/Huggy/Huggy/Huggy-1199996.onnx",
"reward": 3.949764745277271,
"creation_time": 1674831027.4618986,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199996.pt"
]
},
{
"steps": 1399919,
"file_path": "results/Huggy/Huggy/Huggy-1399919.onnx",
"reward": 5.342246055603027,
"creation_time": 1674831244.834406,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399919.pt"
]
},
{
"steps": 1599936,
"file_path": "results/Huggy/Huggy/Huggy-1599936.onnx",
"reward": 3.5601873899048027,
"creation_time": 1674831459.2459607,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599936.pt"
]
},
{
"steps": 1799914,
"file_path": "results/Huggy/Huggy/Huggy-1799914.onnx",
"reward": 3.8552541131273323,
"creation_time": 1674831678.5476396,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799914.pt"
]
},
{
"steps": 1999972,
"file_path": "results/Huggy/Huggy/Huggy-1999972.onnx",
"reward": 3.323107960489061,
"creation_time": 1674831898.251832,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999972.pt"
]
},
{
"steps": 2000014,
"file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
"reward": 3.3137612265089285,
"creation_time": 1674831898.3610954,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
],
"final_checkpoint": {
"steps": 2000014,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.3137612265089285,
"creation_time": 1674831898.3610954,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}