---
# config.yaml — experiment configuration (LoRA/adapter fine-tuning + inference).
# project_name: "Exp"

# Adapter / hyper-network dimensions consumed by the model builder.
hyperxs:
  lora_attn_dim: 64          # LoRA rank for attention projections
  n_cross_attn_tokens: 8
  latent_feature_dim: 512
  out_proj_dim: 128

# model:
#   feature_dim: 768
#   n_layersX: 24

data:
  dataset_name: "CMS"
  # data_path: './ft-training_set/math_7k.json'
  data_path: 'ft-training_set/commonsense_147k.json'
  val_set_size: 512          # number of examples held out for validation

training:
  # NOTE: written as 1.0e-4, not 1e-4 — YAML 1.1 loaders (e.g. PyYAML)
  # only resolve exponent forms with a decimal point as floats; a bare
  # `1e-4` would load as the string "1e-4".
  learning_rate: 1.0e-4
  per_device_train_batch_size: 16
  per_device_eval_batch_size: 32
  max_steps: -1              # -1: no step cap; num_train_epochs governs length
  num_train_epochs: 2
  logging_steps: 50
  eval_strategy: steps       # evaluate every `eval_steps` steps
  eval_steps: 50
  report_to: wandb

infer:
  # datasets: ['boolq']
  eval_batch_size: 128

seed: 42