{
"Huggy": {
"checkpoints": [
{
"steps": 199955,
"file_path": "results/Huggy/Huggy/Huggy-199955.onnx",
"reward": 3.134714987180004,
"creation_time": 1682465244.2636993,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199955.pt"
]
},
{
"steps": 399878,
"file_path": "results/Huggy/Huggy/Huggy-399878.onnx",
"reward": 3.6128534678671813,
"creation_time": 1682465485.057559,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399878.pt"
]
},
{
"steps": 599873,
"file_path": "results/Huggy/Huggy/Huggy-599873.onnx",
"reward": 3.9419172781485097,
"creation_time": 1682465728.359623,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599873.pt"
]
},
{
"steps": 799912,
"file_path": "results/Huggy/Huggy/Huggy-799912.onnx",
"reward": 4.007534398530659,
"creation_time": 1682465968.9178078,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799912.pt"
]
},
{
"steps": 999973,
"file_path": "results/Huggy/Huggy/Huggy-999973.onnx",
"reward": 3.8615464258652468,
"creation_time": 1682466214.2446396,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999973.pt"
]
},
{
"steps": 1199934,
"file_path": "results/Huggy/Huggy/Huggy-1199934.onnx",
"reward": 4.197402656763449,
"creation_time": 1682466465.093838,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199934.pt"
]
},
{
"steps": 1399933,
"file_path": "results/Huggy/Huggy/Huggy-1399933.onnx",
"reward": 3.9441867641040256,
"creation_time": 1682466718.834264,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399933.pt"
]
},
{
"steps": 1599912,
"file_path": "results/Huggy/Huggy/Huggy-1599912.onnx",
"reward": 3.9405184718065485,
"creation_time": 1682466968.6190674,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599912.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
"reward": 3.830255868106053,
"creation_time": 1682467224.261764,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 4.25036346912384,
"creation_time": 1682467481.4748223,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000098,
"file_path": "results/Huggy/Huggy/Huggy-2000098.onnx",
"reward": 4.2631177861811755,
"creation_time": 1682467481.6031106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000098.pt"
]
}
],
"final_checkpoint": {
"steps": 2000098,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.2631177861811755,
"creation_time": 1682467481.6031106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000098.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}