{
"Huggy": {
"checkpoints": [
{
"steps": 199885,
"file_path": "results/Huggy2/Huggy/Huggy-199885.onnx",
"reward": 3.558358775941949,
"creation_time": 1744981811.9946303,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199885.pt"
]
},
{
"steps": 399944,
"file_path": "results/Huggy2/Huggy/Huggy-399944.onnx",
"reward": 3.3894512203504457,
"creation_time": 1744982056.4155722,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399944.pt"
]
},
{
"steps": 599879,
"file_path": "results/Huggy2/Huggy/Huggy-599879.onnx",
"reward": 4.508080618722098,
"creation_time": 1744982319.646462,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599879.pt"
]
},
{
"steps": 799929,
"file_path": "results/Huggy2/Huggy/Huggy-799929.onnx",
"reward": 3.8903285793378846,
"creation_time": 1744982582.9500644,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799929.pt"
]
},
{
"steps": 999961,
"file_path": "results/Huggy2/Huggy/Huggy-999961.onnx",
"reward": 3.832655863968406,
"creation_time": 1744982842.1418042,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999961.pt"
]
},
{
"steps": 1199929,
"file_path": "results/Huggy2/Huggy/Huggy-1199929.onnx",
"reward": 3.9013307794692023,
"creation_time": 1744983106.3314574,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199929.pt"
]
},
{
"steps": 1399959,
"file_path": "results/Huggy2/Huggy/Huggy-1399959.onnx",
"reward": 3.9307702127593513,
"creation_time": 1744983385.584814,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399959.pt"
]
},
{
"steps": 1599981,
"file_path": "results/Huggy2/Huggy/Huggy-1599981.onnx",
"reward": 4.088741679986318,
"creation_time": 1744983644.6137073,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599981.pt"
]
},
{
"steps": 1799915,
"file_path": "results/Huggy2/Huggy/Huggy-1799915.onnx",
"reward": 3.7446256279945374,
"creation_time": 1744983896.8990583,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799915.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy2/Huggy/Huggy-1999991.onnx",
"reward": 3.2026373744010925,
"creation_time": 1744984142.213013,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000090,
"file_path": "results/Huggy2/Huggy/Huggy-2000090.onnx",
"reward": 3.599482430352105,
"creation_time": 1744984142.3370812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000090.pt"
]
}
],
"final_checkpoint": {
"steps": 2000090,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.599482430352105,
"creation_time": 1744984142.3370812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000090.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}