{
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy2/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1748941406.444438,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-0.pt"
]
},
{
"steps": 199945,
"file_path": "results/Huggy2/Huggy/Huggy-199945.onnx",
"reward": 3.305153879626044,
"creation_time": 1748944591.453936,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199945.pt"
]
},
{
"steps": 399971,
"file_path": "results/Huggy2/Huggy/Huggy-399971.onnx",
"reward": 3.651497954752908,
"creation_time": 1748945033.1469195,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399971.pt"
]
},
{
"steps": 599852,
"file_path": "results/Huggy2/Huggy/Huggy-599852.onnx",
"reward": 4.044171660034745,
"creation_time": 1748945470.7899847,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599852.pt"
]
},
{
"steps": 799943,
"file_path": "results/Huggy2/Huggy/Huggy-799943.onnx",
"reward": 3.832039082606222,
"creation_time": 1748945900.3693635,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799943.pt"
]
},
{
"steps": 999956,
"file_path": "results/Huggy2/Huggy/Huggy-999956.onnx",
"reward": 3.7663881526225143,
"creation_time": 1748946345.8974438,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999956.pt"
]
},
{
"steps": 1199943,
"file_path": "results/Huggy2/Huggy/Huggy-1199943.onnx",
"reward": 3.954370750002114,
"creation_time": 1748946788.1585815,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199943.pt"
]
},
{
"steps": 1399955,
"file_path": "results/Huggy2/Huggy/Huggy-1399955.onnx",
"reward": 3.595871246286801,
"creation_time": 1748947261.5907428,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399955.pt"
]
},
{
"steps": 1599968,
"file_path": "results/Huggy2/Huggy/Huggy-1599968.onnx",
"reward": 4.049666898088022,
"creation_time": 1748947693.6361015,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599968.pt"
]
},
{
"steps": 1799943,
"file_path": "results/Huggy2/Huggy/Huggy-1799943.onnx",
"reward": 4.114564000749263,
"creation_time": 1748948141.605763,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799943.pt"
]
},
{
"steps": 1835687,
"file_path": "results/Huggy2/Huggy/Huggy-1835687.onnx",
"reward": 3.6620664589288756,
"creation_time": 1748948223.907404,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1835687.pt"
]
},
{
"steps": 1999968,
"file_path": "results/Huggy2/Huggy/Huggy-1999968.onnx",
"reward": 4.014259666734743,
"creation_time": 1748948602.263625,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999968.pt"
]
},
{
"steps": 2000117,
"file_path": "results/Huggy2/Huggy/Huggy-2000117.onnx",
"reward": 4.022125189185988,
"creation_time": 1748948602.468449,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000117.pt"
]
}
],
"final_checkpoint": {
"steps": 2000117,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.022125189185988,
"creation_time": 1748948602.468449,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000117.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}