# poisson-0.32 / config.yaml
# Uploaded by joseph-tennyson using huggingface_hub (commit 92bd930, verified)
---
# Training configuration for an in-context-learning GLM run
# (Poisson link function, scaling factor 0.32).
config: /root/in-context-learning-GLM/z_pois.yaml

# Transformer backbone.
model:
  family: gpt2
  n_dims: 10        # covariate dimensionality of each in-context example
  n_embd: 256       # embedding width
  n_head: 8
  n_layer: 12
  n_positions: 40   # maximum number of in-context points per sequence

out_dir: /root/in-context-learning-GLM/models/poisson-0.32
test_run: false

training:
  batch_size: 256
  # Curriculum is flat (inc: 0): dims and points stay fixed at their
  # start values for the whole run.
  curriculum:
    dims:
      end: 10
      inc: 0
      interval: 10000
      start: 10
    points:
      end: 40
      inc: 0
      interval: 10000
      start: 40
  data: gaussian
  keep_every_steps: 1000    # checkpoints retained permanently at this cadence
  learning_rate: 0.00025
  num_tasks: 250000000
  num_training_examples: null
  resume_id: poisson-0.32
  save_every_steps: 1000    # rolling checkpoint cadence
  task: GLM
  task_kwargs:
    function_type: poisson
    scaling: 0.32
  train_steps: 8000

# Weights & Biases logging.
wandb:
  entity: in-context
  log_every_steps: 10
  name: poisson-0.32
  notes: ICL GLM training
  project: in-context-training