ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199895,
                "file_path": "results/Huggy/Huggy/Huggy-199895.onnx",
                "reward": 3.3854378203550977,
                "creation_time": 1673076383.1171439,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199895.pt"
                ]
            },
            {
                "steps": 399961,
                "file_path": "results/Huggy/Huggy/Huggy-399961.onnx",
                "reward": 3.6859035101803865,
                "creation_time": 1673076600.8472774,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399961.pt"
                ]
            },
            {
                "steps": 599970,
                "file_path": "results/Huggy/Huggy/Huggy-599970.onnx",
                "reward": 3.6993576396595347,
                "creation_time": 1673076827.5979407,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599970.pt"
                ]
            },
            {
                "steps": 799952,
                "file_path": "results/Huggy/Huggy/Huggy-799952.onnx",
                "reward": 3.76933221022288,
                "creation_time": 1673077054.4809344,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799952.pt"
                ]
            },
            {
                "steps": 999964,
                "file_path": "results/Huggy/Huggy/Huggy-999964.onnx",
                "reward": 4.006328114406849,
                "creation_time": 1673077282.2612386,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999964.pt"
                ]
            },
            {
                "steps": 1199868,
                "file_path": "results/Huggy/Huggy/Huggy-1199868.onnx",
                "reward": 4.026216417077988,
                "creation_time": 1673077513.906878,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199868.pt"
                ]
            },
            {
                "steps": 1399944,
                "file_path": "results/Huggy/Huggy/Huggy-1399944.onnx",
                "reward": 3.9557197703255547,
                "creation_time": 1673077742.9312942,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399944.pt"
                ]
            },
            {
                "steps": 1599947,
                "file_path": "results/Huggy/Huggy/Huggy-1599947.onnx",
                "reward": 3.7149494470407567,
                "creation_time": 1673077976.1764328,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599947.pt"
                ]
            },
            {
                "steps": 1799917,
                "file_path": "results/Huggy/Huggy/Huggy-1799917.onnx",
                "reward": 3.859301409177613,
                "creation_time": 1673078207.7333076,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799917.pt"
                ]
            },
            {
                "steps": 1999976,
                "file_path": "results/Huggy/Huggy/Huggy-1999976.onnx",
                "reward": 4.166005842147335,
                "creation_time": 1673078439.8623884,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999976.pt"
                ]
            },
            {
                "steps": 2000012,
                "file_path": "results/Huggy/Huggy/Huggy-2000012.onnx",
                "reward": 4.057798817753792,
                "creation_time": 1673078439.9760938,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000012.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000012,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.057798817753792,
            "creation_time": 1673078439.9760938,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000012.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
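
For readers who want to work with this log, here is a minimal Python sketch (not part of the original run logs) that parses training_status.json and prints the reward recorded at each saved checkpoint; the file path below assumes the repo layout shown in the header.

import json

# Load the training status file written by ML-Agents
# (path assumes this repository's layout).
with open("ppo-Huggy/run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Each checkpoint records the step count, the exported .onnx policy path,
# the reward at save time, a Unix creation timestamp, and auxiliary files
# (note: the key really is spelled "auxillary_file_paths" in this format).
for ckpt in huggy["checkpoints"]:
    print(f"steps={ckpt['steps']:>8}  reward={ckpt['reward']:.3f}  -> {ckpt['file_path']}")

final = huggy["final_checkpoint"]
print(f"final: steps={final['steps']}  reward={final['reward']:.3f}  -> {final['file_path']}")

Run against this file, the loop shows the reward climbing from roughly 3.39 at 199,895 steps to about 4.06 at the final 2,000,012-step checkpoint.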
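
Similarly, a sketch of how the exported policy could be inspected, assuming onnxruntime is installed; the path is the final_checkpoint file_path above, and the tensor names and shapes depend on the ML-Agents export rather than anything recorded in this JSON.

import onnxruntime as ort

# Open the final exported policy; pinning CPUExecutionProvider keeps this
# runnable on machines without a GPU build of onnxruntime.
sess = ort.InferenceSession("results/Huggy/Huggy.onnx",
                            providers=["CPUExecutionProvider"])

# List the graph's input and output tensors.
for inp in sess.get_inputs():
    print("input: ", inp.name, inp.shape, inp.type)
for out in sess.get_outputs():
    print("output:", out.name, out.shape, out.type)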