ppo-Huggy-test / run_logs / training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199806,
"file_path": "results/Huggy/Huggy/Huggy-199806.onnx",
"reward": 3.510217934846878,
"creation_time": 1679992737.3339922,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199806.pt"
]
},
{
"steps": 399937,
"file_path": "results/Huggy/Huggy/Huggy-399937.onnx",
"reward": 3.8125986643135548,
"creation_time": 1679992971.3112574,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399937.pt"
]
},
{
"steps": 599964,
"file_path": "results/Huggy/Huggy/Huggy-599964.onnx",
"reward": 4.170349968804254,
"creation_time": 1679993210.211018,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599964.pt"
]
},
{
"steps": 799905,
"file_path": "results/Huggy/Huggy/Huggy-799905.onnx",
"reward": 3.725366231399713,
"creation_time": 1679993444.3311648,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799905.pt"
]
},
{
"steps": 999849,
"file_path": "results/Huggy/Huggy/Huggy-999849.onnx",
"reward": 4.120559979685776,
"creation_time": 1679993684.8837762,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999849.pt"
]
},
{
"steps": 1199957,
"file_path": "results/Huggy/Huggy/Huggy-1199957.onnx",
"reward": 4.3673203677347265,
"creation_time": 1679993921.657196,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199957.pt"
]
},
{
"steps": 1399985,
"file_path": "results/Huggy/Huggy/Huggy-1399985.onnx",
"reward": 3.8316304335190403,
"creation_time": 1679994157.204574,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399985.pt"
]
},
{
"steps": 1599964,
"file_path": "results/Huggy/Huggy/Huggy-1599964.onnx",
"reward": 4.1137921070478045,
"creation_time": 1679994395.7372289,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599964.pt"
]
},
{
"steps": 1799944,
"file_path": "results/Huggy/Huggy/Huggy-1799944.onnx",
"reward": 3.8239279377918978,
"creation_time": 1679994633.306664,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799944.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 4.013361103606947,
"creation_time": 1679994869.8451536,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000048,
"file_path": "results/Huggy/Huggy/Huggy-2000048.onnx",
"reward": 3.9623234026572285,
"creation_time": 1679994869.9621978,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000048.pt"
]
}
],
"final_checkpoint": {
"steps": 2000048,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9623234026572285,
"creation_time": 1679994869.9621978,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000048.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
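
A minimal sketch of how this file might be consumed, assuming it has been saved locally as `training_status.json` (the filename and variable names below are illustrative, not part of the repo). It lists the mean reward recorded at each checkpoint and the final exported policy.

```python
import json

# Load the ML-Agents training status file (path is an assumption for this sketch).
with open("training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Print the step count, reward, and ONNX path for every saved checkpoint.
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')

# The final checkpoint is the policy exported at the end of training.
final = huggy["final_checkpoint"]
print("final:", final["file_path"], "reward:", round(final["reward"], 3))
```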