{
"Huggy": {
"checkpoints": [
{
"steps": 199984,
"file_path": "results/Huggy/Huggy/Huggy-199984.onnx",
"reward": 3.4035648392207585,
"creation_time": 1683403485.9632452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199984.pt"
]
},
{
"steps": 399922,
"file_path": "results/Huggy/Huggy/Huggy-399922.onnx",
"reward": 3.7837587884730763,
"creation_time": 1683403713.3809452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399922.pt"
]
},
{
"steps": 599973,
"file_path": "results/Huggy/Huggy/Huggy-599973.onnx",
"reward": 3.3314537796480903,
"creation_time": 1683403944.718084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599973.pt"
]
},
{
"steps": 799542,
"file_path": "results/Huggy/Huggy/Huggy-799542.onnx",
"reward": 3.7924135337615836,
"creation_time": 1683404177.1373453,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799542.pt"
]
},
{
"steps": 999929,
"file_path": "results/Huggy/Huggy/Huggy-999929.onnx",
"reward": 3.789855630520512,
"creation_time": 1683404412.4809792,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999929.pt"
]
},
{
"steps": 1199923,
"file_path": "results/Huggy/Huggy/Huggy-1199923.onnx",
"reward": 3.7724183890081586,
"creation_time": 1683404642.7183988,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199923.pt"
]
},
{
"steps": 1399967,
"file_path": "results/Huggy/Huggy/Huggy-1399967.onnx",
"reward": 3.3947581819125583,
"creation_time": 1683404872.9410276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399967.pt"
]
},
{
"steps": 1599266,
"file_path": "results/Huggy/Huggy/Huggy-1599266.onnx",
"reward": 3.955237402855975,
"creation_time": 1683405102.5937946,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599266.pt"
]
},
{
"steps": 1799964,
"file_path": "results/Huggy/Huggy/Huggy-1799964.onnx",
"reward": 3.8431178036424303,
"creation_time": 1683405340.9541156,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799964.pt"
]
},
{
"steps": 1999966,
"file_path": "results/Huggy/Huggy/Huggy-1999966.onnx",
"reward": 3.6734133618218556,
"creation_time": 1683405577.3904102,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999966.pt"
]
},
{
"steps": 2000019,
"file_path": "results/Huggy/Huggy/Huggy-2000019.onnx",
"reward": 3.656835983196894,
"creation_time": 1683405577.5875778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000019.pt"
]
}
],
"final_checkpoint": {
"steps": 2000019,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.656835983196894,
"creation_time": 1683405577.5875778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000019.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}