{
"Huggy": {
"checkpoints": [
{
"steps": 199948,
"file_path": "results/Huggy/Huggy/Huggy-199948.onnx",
"reward": 3.7635952499177723,
"creation_time": 1694804004.0768862,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199948.pt"
]
},
{
"steps": 399577,
"file_path": "results/Huggy/Huggy/Huggy-399577.onnx",
"reward": 3.6854134394276525,
"creation_time": 1694804243.445055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399577.pt"
]
},
{
"steps": 599980,
"file_path": "results/Huggy/Huggy/Huggy-599980.onnx",
"reward": 3.3577280276351504,
"creation_time": 1694804481.829143,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599980.pt"
]
},
{
"steps": 799985,
"file_path": "results/Huggy/Huggy/Huggy-799985.onnx",
"reward": 3.8131586683207543,
"creation_time": 1694804721.5552804,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799985.pt"
]
},
{
"steps": 999994,
"file_path": "results/Huggy/Huggy/Huggy-999994.onnx",
"reward": 3.8695485930551183,
"creation_time": 1694804965.7546983,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999994.pt"
]
},
{
"steps": 1199934,
"file_path": "results/Huggy/Huggy/Huggy-1199934.onnx",
"reward": 4.0047982675688605,
"creation_time": 1694805208.9571033,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199934.pt"
]
},
{
"steps": 1399976,
"file_path": "results/Huggy/Huggy/Huggy-1399976.onnx",
"reward": 3.74280076318317,
"creation_time": 1694805448.1270406,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399976.pt"
]
},
{
"steps": 1599972,
"file_path": "results/Huggy/Huggy/Huggy-1599972.onnx",
"reward": 4.0632176415903585,
"creation_time": 1694805696.086158,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599972.pt"
]
},
{
"steps": 1799923,
"file_path": "results/Huggy/Huggy/Huggy-1799923.onnx",
"reward": 3.7542956488233217,
"creation_time": 1694805944.262247,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799923.pt"
]
},
{
"steps": 1999810,
"file_path": "results/Huggy/Huggy/Huggy-1999810.onnx",
"reward": 2.812379636547782,
"creation_time": 1694806189.3958445,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999810.pt"
]
},
{
"steps": 2000560,
"file_path": "results/Huggy/Huggy/Huggy-2000560.onnx",
"reward": 2.3010415186484656,
"creation_time": 1694806189.548177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000560.pt"
]
}
],
"final_checkpoint": {
"steps": 2000560,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.3010415186484656,
"creation_time": 1694806189.548177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000560.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}