Update PPO_Trainer.py
PPO_Trainer.py  +1 -1  CHANGED
@@ -17,7 +17,7 @@ HYPERPARAMETERS = {
     'num_epochs_per_update': 10,
     'batch_size': 64,
     'num_steps_per_rollout': 2048,
-    'total_timesteps':
+    'total_timesteps': 7_000_000,
     'hidden_layer_sizes': [512, 512, 512],
     'save_interval_timesteps': 400000,
     'log_interval_episodes': 10,
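For context, here is a minimal sketch of how hyperparameters like these are typically consumed in a PPO training loop. The dict values are taken from the diff above; the `training_schedule` helper, its arithmetic, and the assumption that one policy update follows each rollout are illustrative assumptions, not code from PPO_Trainer.py.

# Sketch of assumed usage; not the actual PPO_Trainer.py training loop.
HYPERPARAMETERS = {
    'num_epochs_per_update': 10,
    'batch_size': 64,
    'num_steps_per_rollout': 2048,
    'total_timesteps': 7_000_000,
    'hidden_layer_sizes': [512, 512, 512],
    'save_interval_timesteps': 400000,
    'log_interval_episodes': 10,
}

def training_schedule(hp: dict) -> None:
    # Assuming one PPO update after each rollout of 'num_steps_per_rollout' steps,
    # the run performs roughly total_timesteps / num_steps_per_rollout updates.
    num_updates = hp['total_timesteps'] // hp['num_steps_per_rollout']
    # Each update replays the rollout for 'num_epochs_per_update' epochs in
    # minibatches of 'batch_size', giving this many gradient steps overall.
    minibatches_per_epoch = hp['num_steps_per_rollout'] // hp['batch_size']
    gradient_steps = num_updates * hp['num_epochs_per_update'] * minibatches_per_epoch
    print(f"updates: {num_updates}, total gradient steps: {gradient_steps}")

if __name__ == '__main__':
    training_schedule(HYPERPARAMETERS)  # updates: 3417, total gradient steps: 1093440

Under these assumptions, raising 'total_timesteps' to 7_000_000 only lengthens the run (more rollout/update cycles); the per-update settings such as 'batch_size' and 'num_epochs_per_update' are unchanged.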