{
"Huggy": {
"checkpoints": [
{
"steps": 199967,
"file_path": "results/Huggy/Huggy/Huggy-199967.onnx",
"reward": 3.4475644309269753,
"creation_time": 1704136886.297605,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199967.pt"
]
},
{
"steps": 399803,
"file_path": "results/Huggy/Huggy/Huggy-399803.onnx",
"reward": 3.856155970133841,
"creation_time": 1704137136.4136105,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399803.pt"
]
},
{
"steps": 599919,
"file_path": "results/Huggy/Huggy/Huggy-599919.onnx",
"reward": 3.299131686870868,
"creation_time": 1704137387.4312747,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599919.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy/Huggy/Huggy-799934.onnx",
"reward": 3.8729696389916657,
"creation_time": 1704137636.794091,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999992,
"file_path": "results/Huggy/Huggy/Huggy-999992.onnx",
"reward": 3.7193245151463676,
"creation_time": 1704137890.1584084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999992.pt"
]
},
{
"steps": 1199933,
"file_path": "results/Huggy/Huggy/Huggy-1199933.onnx",
"reward": 3.614963186548111,
"creation_time": 1704138145.2844417,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199933.pt"
]
},
{
"steps": 1399987,
"file_path": "results/Huggy/Huggy/Huggy-1399987.onnx",
"reward": 3.5393231334770925,
"creation_time": 1704138399.2611926,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399987.pt"
]
},
{
"steps": 1599976,
"file_path": "results/Huggy/Huggy/Huggy-1599976.onnx",
"reward": 3.451010110503749,
"creation_time": 1704138651.6389093,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599976.pt"
]
},
{
"steps": 1799984,
"file_path": "results/Huggy/Huggy/Huggy-1799984.onnx",
"reward": 3.0045250919130115,
"creation_time": 1704138905.3764915,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799984.pt"
]
},
{
"steps": 1999428,
"file_path": "results/Huggy/Huggy/Huggy-1999428.onnx",
"reward": 3.323955382619585,
"creation_time": 1704139155.8610299,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999428.pt"
]
},
{
"steps": 2000178,
"file_path": "results/Huggy/Huggy/Huggy-2000178.onnx",
"reward": 3.2772935586618193,
"creation_time": 1704139156.0181017,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000178.pt"
]
}
],
"final_checkpoint": {
"steps": 2000178,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.2772935586618193,
"creation_time": 1704139156.0181017,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000178.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}