{
"adam_beta2": 0.999,
"arch": "trans_enc",
"avg_model_beta": 0.9999,
"batch_size": 64,
"cond_mask_prob": 0.1,
"context_len": 0,
"cuda": true,
"data_dir": "",
"dataset": "humanml",
"device": 0,
"diffusion_steps": 50,
"emb_before_mask": false,
"emb_trans_dec": false,
"eval_batch_size": 32,
"eval_during_training": true,
"eval_num_samples": 1000,
"eval_rep_times": 3,
"eval_split": "test",
"gen_during_training": true,
"gen_guidance_param": 2.5,
"gen_num_repetitions": 2,
"gen_num_samples": 3,
"keyframe_cond_prob": 0.5,
"keyframe_cond_type": "",
"lambda_fc": 0.0,
"lambda_rcxyz": 0.0,
"lambda_vel": 0.0,
"latent_dim": 512,
"layers": 8,
"log_interval": 1000,
"lr": 0.0001,
"lr_anneal_steps": 0,
"mask_frames": false,
"noise_schedule": "cosine",
"num_frames": 60,
"num_steps": 600000,
"overwrite": true,
"pos_embed_max_len": 5000,
"pred_len": 0,
"resume_checkpoint": "",
"save_dir": "save/BaselinesMain_DiffusionSteps_50",
"save_interval": 50000,
"seed": 10,
"sigma_small": true,
"text_encoder_type": "clip",
"train_platform_type": "WandBPlatform",
"unconstrained": false,
"use_ema": false,
"weight_decay": 0.0
}