# Gin operative configuration (auto-generated dump of bound parameters).
# Parameters for Actor:
# ==============================================================================
Actor.activation = 'leaky_relu'
Actor.cont_dist_kind = 'normal'
Actor.d_hidden = 300
Actor.dropout_p = 0.0
Actor.gmm_modes = 5
Actor.log_std_high = 2.0
Actor.log_std_low = -5.0
Actor.n_layers = 2

# Parameters for Agent:
# ==============================================================================
Agent.fake_filter = False
Agent.gamma = 0.999
Agent.num_critics = 4
Agent.num_critics_td = 2
Agent.offline_coeff = 1.0
Agent.online_coeff = 0.0
Agent.popart = True
Agent.reward_multiplier = 10.0
Agent.tau = 0.003
Agent.use_multigamma = True
Agent.use_target_actor = True

# Parameters for Experiment:
# ==============================================================================
Experiment.batches_per_update = 1
Experiment.critic_loss_weight = 10.0
Experiment.env_mode = 'async'
Experiment.force_reset_train_envs_every = None
Experiment.grad_clip = 1.0
Experiment.has_replay_buffer_rights = True
Experiment.l2_coeff = 0.001
Experiment.learning_rate = 0.0001
Experiment.local_time_optimizer = False
Experiment.lr_warmup_steps = 500
Experiment.mixed_precision = 'no'
Experiment.padded_sampling = 'none'
Experiment.save_trajs_as = 'npz'
Experiment.stagger_traj_file_lengths = True
Experiment.wandb_group_name = None

# Parameters for FlashAttention:
# ==============================================================================
FlashAttention.window_size = (-1, -1)

# Parameters for MetamonTstepEncoder:
# ==============================================================================
MetamonTstepEncoder.d_model = 100
MetamonTstepEncoder.extra_emb_dim = 18
MetamonTstepEncoder.n_heads = 5
MetamonTstepEncoder.n_layers = 3
MetamonTstepEncoder.scratch_tokens = 4
MetamonTstepEncoder.token_mask_aug = False

# Parameters for Multigammas:
# ==============================================================================
Multigammas.continuous = [0.1, 0.9, 0.95, 0.97, 0.99, 0.995]
Multigammas.discrete = [0.1, 0.9, 0.95, 0.97, 0.99, 0.995]

# Parameters for MultiModalEmbedding:
# ==============================================================================
MultiModalEmbedding.dropout = 0.05
MultiModalEmbedding.numerical_tokens = 6

# Parameters for NCritics:
# ==============================================================================
NCritics.activation = 'leaky_relu'
NCritics.d_hidden = 300
NCritics.dropout_p = 0.0
NCritics.n_layers = 2

# Parameters for PopArtLayer:
# ==============================================================================
PopArtLayer.beta = 0.0005
PopArtLayer.init_nu = 100.0

# Parameters for TformerTrajEncoder:
# ==============================================================================
TformerTrajEncoder.activation = 'leaky_relu'
TformerTrajEncoder.causal = True
TformerTrajEncoder.d_ff = 2048
TformerTrajEncoder.d_model = 512
TformerTrajEncoder.dropout_attn = 0.0
TformerTrajEncoder.dropout_emb = 0.05
TformerTrajEncoder.dropout_ff = 0.05
TformerTrajEncoder.dropout_qkv = 0.0
TformerTrajEncoder.head_scaling = True
TformerTrajEncoder.n_heads = 8
TformerTrajEncoder.n_layers = 3
TformerTrajEncoder.norm = 'layer'
TformerTrajEncoder.normformer_norms = True
TformerTrajEncoder.sigma_reparam = True

# Parameters for TimestepTransformer:
# ==============================================================================
# None.

# Parameters for TokenEmbedding:
# ==============================================================================
# None.

# Parameters for TransformerTurnEmbedding:
# ==============================================================================
TransformerTurnEmbedding.dropout = 0.05