{
"Huggy": {
"checkpoints": [
{
"steps": 199809,
"file_path": "results/Huggy/Huggy/Huggy-199809.onnx",
"reward": 3.1985227735713124,
"creation_time": 1687209628.5875049,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199809.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy/Huggy/Huggy-399985.onnx",
"reward": 3.646662509802616,
"creation_time": 1687209754.3638043,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599929,
"file_path": "results/Huggy/Huggy/Huggy-599929.onnx",
"reward": 4.355622172355652,
"creation_time": 1687209881.297912,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599929.pt"
]
},
{
"steps": 799970,
"file_path": "results/Huggy/Huggy/Huggy-799970.onnx",
"reward": 3.6924892322884664,
"creation_time": 1687210009.8241415,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799970.pt"
]
},
{
"steps": 999956,
"file_path": "results/Huggy/Huggy/Huggy-999956.onnx",
"reward": 3.8798341448490437,
"creation_time": 1687210138.482775,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999956.pt"
]
},
{
"steps": 1199996,
"file_path": "results/Huggy/Huggy/Huggy-1199996.onnx",
"reward": 3.9598602214280296,
"creation_time": 1687210268.2280126,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199996.pt"
]
},
{
"steps": 1399920,
"file_path": "results/Huggy/Huggy/Huggy-1399920.onnx",
"reward": 3.363662230968475,
"creation_time": 1687210398.7716117,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399920.pt"
]
},
{
"steps": 1599964,
"file_path": "results/Huggy/Huggy/Huggy-1599964.onnx",
"reward": 3.633514181939428,
"creation_time": 1687210527.3575332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599964.pt"
]
},
{
"steps": 1799719,
"file_path": "results/Huggy/Huggy/Huggy-1799719.onnx",
"reward": 3.6490069347269394,
"creation_time": 1687210656.7604334,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799719.pt"
]
},
{
"steps": 1999969,
"file_path": "results/Huggy/Huggy/Huggy-1999969.onnx",
"reward": 3.992034407456716,
"creation_time": 1687210785.8825693,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999969.pt"
]
},
{
"steps": 2000005,
"file_path": "results/Huggy/Huggy/Huggy-2000005.onnx",
"reward": 3.9192728957822247,
"creation_time": 1687210785.9498959,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000005.pt"
]
}
],
"final_checkpoint": {
"steps": 2000005,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9192728957822247,
"creation_time": 1687210785.9498959,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000005.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.8.1+cu102"
}
}