{
"Huggy": {
"checkpoints": [
{
"steps": 199634,
"file_path": "results/Huggy2/Huggy/Huggy-199634.onnx",
"reward": 3.425545623053366,
"creation_time": 1739125419.1396132,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199634.pt"
]
},
{
"steps": 399935,
"file_path": "results/Huggy2/Huggy/Huggy-399935.onnx",
"reward": 3.2124948574275507,
"creation_time": 1739125664.8385684,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399935.pt"
]
},
{
"steps": 599972,
"file_path": "results/Huggy2/Huggy/Huggy-599972.onnx",
"reward": 3.6859982950346812,
"creation_time": 1739125914.8021054,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599972.pt"
]
},
{
"steps": 799949,
"file_path": "results/Huggy2/Huggy/Huggy-799949.onnx",
"reward": 4.223853688944811,
"creation_time": 1739126157.9901648,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799949.pt"
]
},
{
"steps": 999988,
"file_path": "results/Huggy2/Huggy/Huggy-999988.onnx",
"reward": 4.013948372069826,
"creation_time": 1739126410.4873128,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999988.pt"
]
},
{
"steps": 1199571,
"file_path": "results/Huggy2/Huggy/Huggy-1199571.onnx",
"reward": 4.0968441764513654,
"creation_time": 1739126663.060753,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199571.pt"
]
},
{
"steps": 1399990,
"file_path": "results/Huggy2/Huggy/Huggy-1399990.onnx",
"reward": 3.9097193837165833,
"creation_time": 1739126918.4239933,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399990.pt"
]
},
{
"steps": 1599964,
"file_path": "results/Huggy2/Huggy/Huggy-1599964.onnx",
"reward": 3.631300881053462,
"creation_time": 1739127171.8940868,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599964.pt"
]
},
{
"steps": 1799327,
"file_path": "results/Huggy2/Huggy/Huggy-1799327.onnx",
"reward": 3.673715358106499,
"creation_time": 1739127428.5413094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799327.pt"
]
},
{
"steps": 1999948,
"file_path": "results/Huggy2/Huggy/Huggy-1999948.onnx",
"reward": 3.6974991874874763,
"creation_time": 1739127685.2588468,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999948.pt"
]
},
{
"steps": 2000025,
"file_path": "results/Huggy2/Huggy/Huggy-2000025.onnx",
"reward": 3.738844193794109,
"creation_time": 1739127685.3794832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000025.pt"
]
}
],
"final_checkpoint": {
"steps": 2000025,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.738844193794109,
"creation_time": 1739127685.3794832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000025.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}