Event-trace schema (column, dtype, and the viewer's value ranges):

  Sequence     int64    1 … 25.2k
  Time         int64    1 … 858M   (elapsed time, apparently in milliseconds)
  File         string   830 distinct values
  RangeOffset  int64    0 … 2.21M
  RangeLength  int64    0 … 168k
  Text         string   lengths 1 … 4.7M
  Language     string   20 distinct values
  Type         string   9 distinct values

Each event below is rendered as a header line,
  #Sequence  t=Time  File  Type  (Language, offset=RangeOffset, len=RangeLength)
followed by its Text payload on the next line(s); zero offsets/lengths and null
Language/Text fields are omitted.
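For orientation, a minimal loading sketch using the `datasets` library; the repo id is a placeholder, since this dump does not name its source dataset:

```python
# Hypothetical loading sketch -- "user/coding-trace" is a placeholder repo id,
# not the dataset's real name.
from datasets import load_dataset

ds = load_dataset("user/coding-trace", split="train")
terminal = ds.filter(lambda r: r["Type"] == "terminal_output")
for row in terminal.select(range(5)):
    # Text can be null (see event #585 below), so guard before slicing.
    print(f"#{row['Sequence']}  t={row['Time']}  {(row['Text'] or '')[:80]!r}")
```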
#580  t=1,517,684  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch

#581  t=1,518,072  TERMINAL  terminal_output
yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#582  t=1,521,274  TERMINAL  terminal_output
\r(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$ queue\r\n\r

#583  t=1,522,171  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#584  t=1,522,379  TERMINAL  terminal_output
\r(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$ overfit_sample/causal/dynamics_overfit_sample.sbatch\r\n\r

#585  t=1,523,289  TERMINAL  terminal_output

#586  t=1,525,610  TERMINAL  terminal_output
[?25l&[?25h

#587  t=1,525,830  TERMINAL  terminal_output
[?25l&[?25h

#588  t=1,525,936  TERMINAL  terminal_output
[?25l [?25h

#589  t=1,527,309  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#590  t=1,530,124  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/storage/slurm/mahajanm/yoloruns/%x_%j.log\r\n#SBATCH --error=/storage/slurm/mahajanm/yoloruns/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_overfit_sample_causal_actionspace-1\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=.\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\r\ntokenizer_ckpt_dir=/storage/user/mahajanm/Projects/world-modeling/checkpoints/tokenizer_ckpt\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --num_steps=2000 \\r\n --warmup_steps=0 \\r\n --wsd_decay_steps=0 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --init_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=1000 \\r\n --num_latent_actions=1 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=dynamics-causal-overfit-actionspace-1-$slurm_job_id \\r\n --tags dynamics causal overfit \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4\r\n slurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch: 16: module: not found\r\nslurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch: 17: module: not 
found\r\nSLURM_STEP_NODELIST=node17\r\nSLURM_JOB_USER=mahajanm\r\nSLURM_JOB_GPUS=0\r\nSLURM_JOBID=1393544\r\nSLURM_PTY_PORT=39621\r\nSLURM_JOB_QOS=stud\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_SRUN_COMM_PORT=42571\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_TOPOLOGY_ADDR_PATTERN=node\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_JOB_START_TIME=1753197754\r\nSLURM_JOB_CPUS_PER_NODE=5\r\nSLURM_JOB_NAME=interactive\r\nSLURM_JOB_GID=20909\r\nSLURM_CPUS_ON_NODE=5\r\nSLURM_PROCID=0\r\nSLURM_JOB_ACCOUNT=stud\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_CONF=/var/spool/slurmd/conf-cache/slurm.conf\r\nSLURM_STEP_LAUNCHER_PORT=42571\r\nSLURM_SUBMIT_HOST=atcremers51\r\nSLURM_MPI_TYPE=none\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_NODELIST=node17\r\nSLURM_NNODES=1\r\nSLURM_JOB_ID=1393544\r\nSLURMD_NODENAME=node17\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NODELIST=node17\r\nSLURM_GTIDS=0\r\nSLURM_STEPID=4294967290\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_JOB_END_TIME=1753233754\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_PTY_WIN_ROW=27\r\nSLURM_JOB_UID=7389\r\nSLURM_CLUSTER_NAME=inf9\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_LOCALID=0\r\nSLURM_JOB_PARTITION=NORMAL\r\nSLURM_LAUNCH_NODE_IPADDR=131.159.18.70\r\nSLURMD_DEBUG=2\r\nSLURM_TASK_PID=3978593\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=node17\r\nSLURM_NPROCS=1\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_SRUN_COMM_HOST=131.159.18.70\r\nSLURM_SUBMIT_DIR=/usr/stud/mahajanm/Projects/jafar\r\nSLURM_PTY_WIN_COL=184\r\nSLURM_STEP_ID=4294967290\r\nSLURM_NODEID=0\r\n

#591  t=1,532,459  TERMINAL  terminal_output
/usr/stud/mahajanm/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type `<class 'numpy.dtype'>`, but the default value `<class 'jax.numpy.float32'>` has type `<class 'jax._src.numpy.scalar_types._ScalarMeta'>`. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/usr/stud/mahajanm/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type `<class 'numpy.dtype'>`, but the default value `<class 'jax.numpy.bfloat16'>` has type `<class 'jax._src.numpy.scalar_types._ScalarMeta'>`. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n
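
The two warnings above come from tyro inspecting the training config: fields annotated as `np.dtype` default to JAX scalar types (`jnp.float32`, `jnp.bfloat16`), whose type is `jax._src.numpy.scalar_types._ScalarMeta` rather than `np.dtype`. A minimal sketch of the pattern, and one way to make annotation and default agree; the field names mirror the warning, but the config class is illustrative:

```python
# Illustrative repro of the tyro UserWarning above; Args is an assumption,
# not the project's real config class.
from dataclasses import dataclass, field
import numpy as np
import jax.numpy as jnp
import tyro

@dataclass
class Args:
    # Annotated np.dtype but defaulted to jnp.float32 (a _ScalarMeta): warns.
    param_dtype: np.dtype = jnp.float32
    # Defaulting to an actual np.dtype instance keeps tyro quiet.
    dtype: np.dtype = field(default=np.dtype(np.float32))

args = tyro.cli(Args)
```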

#592  t=1,535,040  TERMINAL  terminal_output
2025-07-22 18:12:40.651862: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n

#593  t=1,544,996  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:12:50.505524)

#594  t=1,563,641  TERMINAL  terminal_output
Running on 1 devices.\r\nEntering jdb:\r\n
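
The "Entering jdb:" line (and the "(jdb)" prompts after the Ctrl-C sequences below) means the run stops inside JAX's debugger, most plausibly a `jax.debug.breakpoint()` left in the traced train step. A sketch of the mechanism, not the project's actual code:

```python
# Sketch: jax.debug.breakpoint() pauses traced code and opens a "(jdb)" prompt.
import jax
import jax.numpy as jnp

@jax.jit
def step(x):
    loss = jnp.mean(x ** 2)
    jax.debug.breakpoint()  # execution halts here; inspect values at the prompt
    return loss

step(jnp.ones(8))
```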

#595  t=1,639,715  TERMINAL  terminal_output
^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=1393544.4 task 0: running\r\n

#596  t=1,639,945  TERMINAL  terminal_output
^Csrun: sending Ctrl-C to StepId=1393544.4\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 1393544.4 ON node17 CANCELLED AT 2025-07-22T18:14:25 ***\r\n

#597  t=1,640,155  TERMINAL  terminal_output
^Csrun: sending Ctrl-C to StepId=1393544.4\r\nsrun: job abort in progress\r\n(jdb)

#598  t=1,640,334  TERMINAL  terminal_output
\r\n]0;mahajanm@node17: /usr/stud/mahajanm/Projects/jafar[?2004h(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$

#599  t=1,640,442  TERMINAL  terminal_output
^C[?2004l\r[?2004h[?2004l\r\r\n]0;mahajanm@node17: /usr/stud/mahajanm/Projects/jafar[?2004h(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$

#600  t=1,641,214  sample.py  tab  (python)

#601  t=1,761,592  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch && sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#602  t=1,765,514  TERMINAL  terminal_output
\r\r\n\r

#603  t=1,767,170  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch && sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#604  t=1,769,374  models/dynamics.py  tab  (python)

#605  t=1,771,741  utils/nn.py  tab  (python)

#606  t=1,772,404  utils/nn.py  selection_mouse  (python, offset=5,589)

#607  t=1,772,404  utils/nn.py  selection_command  (python, offset=5,588)

#608  t=1,790,022  utils/nn.py  selection_mouse  (python, offset=6,433)

#609  t=1,790,025  utils/nn.py  selection_command  (python, offset=6,432)

#610  t=1,800,202  TERMINAL  terminal_focus
bash

#611  t=1,801,091  TERMINAL  terminal_focus
sh

#612  t=1,806,322  TERMINAL  terminal_output
(sbatch-script echo, `module: not found` errors, and SLURM environment dump identical to event #590, minus the echoed command line)

#613  t=1,809,006  TERMINAL  terminal_output
(the same two tyro UserWarnings about `param-dtype`/`dtype` defaults as event #591)

#614  t=1,811,550  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:17:17.138119)

#615  t=1,821,558  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:17:27.170897)

#616  t=1,839,904  TERMINAL  terminal_output
Running on 1 devices.\r\nEntering jdb:\r\n

#617  t=1,843,517  train_dynamics.py  tab  (python)

#618  t=1,844,588  models/dynamics.py  tab  (python)

#619  t=1,846,935  models/dynamics.py  selection_mouse  (python, offset=3,602)

#620  t=1,847,805  models/dynamics.py  content  (python, offset=3,580, len=31)

#621  t=1,849,577  TERMINAL  terminal_output
^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=1393544.5 task 0: running\r\n

#622  t=1,849,766  TERMINAL  terminal_output
^Csrun: sending Ctrl-C to StepId=1393544.5\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 1393544.5 ON node17 CANCELLED AT 2025-07-22T18:17:55 ***\r\n

#623  t=1,850,007  TERMINAL  terminal_output
(jdb)

#624  t=1,850,076  TERMINAL  terminal_output
\r\n]0;mahajanm@node17: /usr/stud/mahajanm/Projects/jafar[?2004h(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$

#625  t=1,850,213  TERMINAL  terminal_output
^C[?2004l\r[?2004h[?2004l\r\r\n]0;mahajanm@node17: /usr/stud/mahajanm/Projects/jafar[?2004h(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$

#626  t=1,850,594  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch && sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#627  t=1,850,850  TERMINAL  terminal_output
(sbatch-script echo, `module: not found` errors, and SLURM environment dump identical to event #612)

#628  t=1,852,529  utils/nn.py  tab  (python)

#629  t=1,853,214  TERMINAL  terminal_output
(the same two tyro UserWarnings as event #591)

#630  t=1,853,402  train_dynamics.py  tab  (python)

#631  t=1,853,871  models/dynamics.py  tab  (python)

#632  t=1,855,239  train_dynamics.py  tab  (python)

#633  t=1,855,728  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:18:01.316649)

#634  t=1,865,707  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:18:11.292032)

#635  t=1,884,155  TERMINAL  terminal_output
Running on 1 devices.\r\nEntering jdb:\r\n

#636  t=1,890,095  train_dynamics.py  tab  (python)

#637  t=1,898,476  TERMINAL  terminal_output
^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=1393544.6 task 0: running\r\n

#638  t=1,898,689  TERMINAL  terminal_output
^Csrun: sending Ctrl-C to StepId=1393544.6\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 1393544.6 ON node17 CANCELLED AT 2025-07-22T18:18:44 ***\r\n

#639  t=1,898,840  TERMINAL  terminal_output
^Csrun: sending Ctrl-C to StepId=1393544.6\r\nsrun: job abort in progress\r\n

#640  t=1,898,922  TERMINAL  terminal_output
(jdb)

#641  t=1,899,040  TERMINAL  terminal_output
\r\n]0;mahajanm@node17: /usr/stud/mahajanm/Projects/jafar[?2004h(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$ ^C[?2004l\r[?2004h[?2004l\r\r\n]0;mahajanm@node17: /usr/stud/mahajanm/Projects/jafar[?2004h(jafar) ]0;mahajanm@node17: ~/Projects/jafarmahajanm@node17:~/Projects/jafar$

#642  t=1,900,109  models/dynamics.py  tab  (python)

#643  t=1,900,884  models/dynamics.py  selection_mouse  (python, offset=3,467)

#644  t=1,900,884  models/dynamics.py  selection_command  (python, offset=3,466)

#645  t=1,919,471  TERMINAL  terminal_output
sh slurm/jobs/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch && sh slurm/jobs/mihir/horeka/yolo-runs/sampling.sh /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544

#646  t=1,921,272  TERMINAL  terminal_output
(sbatch-script echo, `module: not found` errors, and SLURM environment dump identical to event #612)

#647  t=1,923,892  TERMINAL  terminal_output
(the same two tyro UserWarnings as event #591)

#648  t=1,926,549  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:19:12.041701)

#649  t=1,936,397  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:19:21.961951)

#650  t=1,955,880  TERMINAL  terminal_output
(same XLA dot_search_space autotuning warning as #592, timestamped 2025-07-22 18:19:41.439414)

#651  t=1,959,164  TERMINAL  terminal_output
wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n

#652  t=1,959,869  TERMINAL  terminal_output
wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /usr/stud/mahajanm/Projects/jafar/wandb/run-20250722_181944-8r0fks74\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-causal-overfit-actionspace-1-1393544\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/8r0fks74\r\n
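
The run metadata above matches the `--entity/--project/--name/--tags` flags in the sbatch script; it corresponds roughly to an init call like the following (a sketch, not necessarily how train_dynamics.py wires it up):

```python
# Sketch of the wandb setup implied by the sbatch flags.
import wandb

run = wandb.init(
    entity="instant-uv",
    project="jafar",
    name="dynamics-causal-overfit-actionspace-1-1393544",
    tags=["dynamics", "causal", "overfit"],
)
```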

#653  t=1,961,794  TERMINAL  terminal_output
WARNING:absl:Missing metrics for step 146000\r\nERROR:absl:File /storage/user/mahajanm/Projects/world-modeling/checkpoints/tokenizer_ckpt/146000/metrics/metrics not found.\r\n

#654  t=1,966,407  TERMINAL  terminal_output
^[

#655  t=1,966,681  TERMINAL  terminal_output
^[

#656  t=1,967,083  TERMINAL  terminal_output
^[

#657  t=1,967,291  TERMINAL  terminal_output
^[

#658  t=1,967,429  TERMINAL  terminal_output
^[

#659  t=1,979,592  TERMINAL  terminal_output
(five back-to-back copies of the same XLA autotuning warning as #592, timestamps 2025-07-22 18:20:05.170525 through 18:20:05.173813)

#660  t=2,044,400  TERMINAL  terminal_output
Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 37989616, 'lam': 19349312, 'dynamics': 583168, 'total': 57922096}\r\nStep 0, loss: 9.480460166931152\r\nStep 1, loss: 9.17424201965332\r\nStep 2, loss: 8.904892921447754\r\nStep 3, loss: 8.671215057373047\r\nStep 4, loss: 8.47729206085205\r\nStep 5, loss: 8.312150001525879\r\nStep 6, loss: 8.167618751525879\r\nStep 7, loss: 8.042706489562988\r\nStep 8, loss: 7.934808731079102\r\nStep 9, loss: 7.845317840576172\r\nStep 10, loss: 7.76653528213501\r\nStep 11, loss: 7.698042392730713\r\nStep 12, loss: 7.638092041015625\r\nStep 13, loss: 7.58504581451416\r\nStep 14, loss: 7.536530017852783\r\nStep 15, loss: 7.491424083709717\r\nStep 16, loss: 7.448380947113037\r\nStep 17, loss: 7.406976222991943\r\nStep 18, loss: 7.367974758148193\r\nStep 19, loss: 7.331673622131348\r\nStep 20, loss: 7.297691822052002\r\nStep 21, loss: 7.26692008972168\r\nStep 22, loss: 7.239825248718262\r\nStep 23, loss: 7.2149553298950195\r\nStep 24, loss: 7.191579341888428\r\nStep 25, loss: 7.168036460876465\r\nStep 26, loss: 7.145593643188477\r\nStep 27, loss: 7.12391471862793\r\nStep 28, loss: 7.102698802947998\r\nStep 29, loss: 7.083007335662842\r\nStep 30, loss: 7.063911437988281\r\nStep 31, loss: 7.045735836029053\r\nStep 32, loss: 7.027525901794434\r\nStep 33, loss: 7.009570121765137\r\nStep 34, loss: 6.991521835327148\r\nStep 35, loss: 6.973966121673584\r\nStep 36, loss: 6.957358360290527\r\nStep 37, loss: 6.94111967086792\r\nStep 38, loss: 6.926177024841309\r\nStep 39, loss: 6.911700248718262\r\nStep 40, loss: 6.8961381912231445\r\nStep 41, loss: 6.8810715675354\r\nStep 42, loss: 6.865952014923096\r\nStep 43, loss: 6.851129055023193\r\nStep 44, loss: 6.836585521697998\r\nStep 45, loss: 6.822129249572754\r\nStep 46, loss: 6.808188438415527\r\nStep 47, loss: 6.794126510620117\r\nStep 48, loss: 6.779969215393066\r\nStep 49, loss: 6.765874862670898\r\nStep 50, loss: 6.7518415451049805\r\nStep 51, loss: 6.737873554229736\r\nStep 52, loss: 6.724252223968506\r\nStep 53, loss: 6.710564136505127\r\nStep 54, loss: 6.6972761154174805\r\nStep 55, loss: 6.6834893226623535\r\nStep 56, loss: 6.669681549072266\r\nStep 57, loss: 6.656223297119141\r\nStep 58, loss: 6.642978191375732\r\nStep 59, loss: 6.629431247711182\r\nStep 60, loss: 6.615810394287109\r\nStep 61, loss: 6.602208137512207\r\nStep 62, loss: 6.589109897613525\r\nStep 63, loss: 6.575628280639648\r\nStep 64, loss: 6.5621137619018555\r\nStep 65, loss: 6.548994541168213\r\nStep 66, loss: 6.53590726852417\r\nStep 67, loss: 6.522897243499756\r\nStep 68, loss: 6.5100531578063965\r\nStep 69, loss: 6.4974141120910645\r\nStep 70, loss: 6.48438835144043\r\nStep 71, loss: 6.47168493270874\r\nStep 72, loss: 6.459214210510254\r\nStep 73, loss: 6.446499824523926\r\nStep 74, loss: 6.433981895446777\r\nStep 75, loss: 6.4213948249816895\r\nStep 76, loss: 6.4089202880859375\r\nStep 77, loss: 6.3962860107421875\r\nStep 78, loss: 6.384078502655029\r\nStep 79, loss: 6.371949195861816\r\nStep 80, loss: 6.359350681304932\r\nStep 81, loss: 6.3472113609313965\r\nStep 82, loss: 6.335168838500977\r\nStep 83, loss: 6.323110103607178\r\nStep 84, loss: 6.311218738555908\r\nStep 85, loss: 6.299188613891602\r\nStep 86, loss: 6.287357807159424\r\nStep 87, loss: 6.275651454925537\r\nStep 88, loss: 6.264026641845703\r\nStep 89, loss: 6.25237512588501\r\nStep 90, loss: 6.240903377532959\r\nStep 91, loss: 6.229410171508789\r\nStep 92, loss: 6.217897891998291\r\nStep 93, loss: 
6.206582069396973\r\nStep 94, loss: 6.195414066314697\r\nStep 95, loss: 6.184123516082764\r\nStep 96, loss: 6.172741889953613\r\nStep 97, loss: 6.1617255210876465\r\nStep 98, loss: 6.150881767272949\r\nStep 99, loss: 6.139917850494385\r\nStep 100, loss: 6.128978729248047\r\nStep 101, loss: 6.118105888366699\r\nStep 102, loss: 6.107480525970459\r\nStep 103, loss: 6.0969109535217285\r\nStep 104, loss: 6.086226940155029\r\nStep 105, loss: 6.0756025314331055\r\nStep 106, loss: 6.065127849578857\r\nStep 107, loss: 6.054866790771484\r\nStep 108, loss: 6.0446553230285645\r\nStep 109, loss: 6.034468173980713\r\nStep 110, loss: 6.024503707885742\r\nStep 111, loss: 6.014429092407227\r\nStep 112, loss: 6.004430294036865\r\nStep 113, loss: 5.994698524475098\r\nStep 114, loss: 5.984843730926514\r\nStep 115, loss: 5.9751296043396\r\nStep 116, loss: 5.965479373931885\r\nStep 117, loss: 5.9559550285339355\r\nStep 118, loss: 5.946455478668213\r\nStep 119, loss: 5.937121868133545\r\nStep 120, loss: 5.927791595458984\r\nStep 121, loss: 5.918332576751709\r\nStep 122, loss: 5.9091997146606445\r\nStep 123, loss: 5.8999457359313965\r\nStep 124, loss: 5.890802383422852\r\nStep 125, loss: 5.881682872772217\r\nStep 126, loss: 5.872680187225342\r\nStep 127, loss: 5.863688945770264\r\nStep 128, loss: 5.854817867279053\r\nStep 129, loss: 5.845907688140869\r\nStep 130, loss: 5.8370561599731445\r\nStep 131, loss: 5.828339576721191\r\nStep 132, loss: 5.819582939147949\r\nStep 133, loss: 5.810811519622803\r\nStep 134, loss: 5.802179336547852\r\nStep 135, loss: 5.7933430671691895\r\nStep 136, loss: 5.7846832275390625\r\nStep 137, loss: 5.776198863983154\r\nStep 138, loss: 5.767621994018555\r\nStep 139, loss: 5.7591657638549805\r\nStep 140, loss: 5.750500679016113\r\nStep 141, loss: 5.742275238037109\r\nStep 142, loss: 5.733560562133789\r\nStep 143, loss: 5.725162506103516\r\nStep 144, loss: 5.716525554656982\r\nStep 145, loss: 5.707938194274902\r\nStep 146, loss: 5.699605464935303\r\nStep 147, loss: 5.690977096557617\r\nStep 148, loss: 5.682668209075928\r\nStep 149, loss: 5.674150466918945\r\nStep 150, loss: 5.6655755043029785\r\nStep 151, loss: 5.657305717468262\r\nStep 152, loss: 5.648704528808594\r\nStep 153, loss: 5.6407012939453125\r\nStep 154, loss: 5.632330417633057\r\nStep 155, loss: 5.623915672302246\r\nStep 156, loss: 5.615757465362549\r\nStep 157, loss: 5.607680797576904\r\nStep 158, loss: 5.599198341369629\r\nStep 159, loss: 5.591301918029785\r\nStep 160, loss: 5.583052158355713\r\nStep 161, loss: 5.574908256530762\r\nStep 162, loss: 5.566863536834717\r\nStep 163, loss: 5.558355808258057\r\nStep 164, loss: 5.55031681060791\r\nStep 165, loss: 5.5418548583984375\r\nStep 166, loss: 5.533441543579102\r\nStep 167, loss: 5.524908542633057\r\nStep 168, loss: 5.516613006591797\r\nStep 169, loss: 5.508413791656494\r\nStep 170, loss: 5.499752044677734\r\nStep 171, loss: 5.491381645202637\r\nStep 172, loss: 5.48295783996582\r\nStep 173, loss: 5.474972248077393\r\nStep 174, loss: 5.4664177894592285\r\nStep 175, loss: 5.457925796508789\r\nStep 176, loss: 5.449809551239014\r\nStep 177, loss: 5.441255569458008\r\nStep 178, loss: 5.432647705078125\r\nStep 179, loss: 5.424334526062012\r\nStep 180, loss: 5.415771961212158\r\nStep 181, loss: 5.407241344451904\r\nStep 182, loss: 5.398848056793213\r\nStep 183, loss: 5.3901824951171875\r\nStep 184, loss: 5.381723403930664\r\nStep 185, loss: 5.372879981994629\r\nStep 186, loss: 5.364157199859619\r\nStep 187, loss: 5.355564594268799\r\nStep 188, loss: 5.346704959869385\r\nStep 189, 
loss: 5.338001728057861\r\nStep 190, loss: 5.329338550567627\r\nStep 191, loss: 5.320389747619629\r\nStep 192, loss: 5.3116865158081055\r\nStep 193, loss: 5.302977085113525\r\nStep 194, loss: 5.294025897979736\r\nStep 195, loss: 5.285546779632568\r\nStep 196, loss: 5.276534557342529\r\nStep 197, loss: 5.267711639404297\r\nStep 198, loss: 5.259095668792725\r\nStep 199, loss: 5.25033712387085\r\nStep 200, loss: 5.241352081298828\r\nStep 201, loss: 5.232487678527832\r\nStep 202, loss: 5.223821640014648\r\nStep 203, loss: 5.214844226837158\r\nStep 204, loss: 5.206001281738281\r\nStep 205, loss: 5.197220802307129\r\nStep 206, loss: 5.188520908355713\r\nStep 207, loss: 5.179464340209961\r\nStep 208, loss: 5.170515537261963\r\nStep 209, loss: 5.161584854125977\r\nStep 210, loss: 5.152987480163574\r\nStep 211, loss: 5.1439290046691895\r\nStep 212, loss: 5.13511848449707\r\nStep 213, loss: 5.126303195953369\r\nStep 214, loss: 5.117377281188965\r\nStep 215, loss: 5.108239650726318\r\nStep 216, loss: 5.099483013153076\r\nStep 217, loss: 5.090365409851074\r\nStep 218, loss: 5.0814409255981445\r\nStep 219, loss: 5.072748184204102\r\nStep 220, loss: 5.063522815704346\r\nStep 221, loss: 5.054193496704102\r\nStep 222, loss: 5.045683860778809\r\nStep 223, loss: 5.036563396453857\r\nStep 224, loss: 5.027276039123535\r\nStep 225, loss: 5.018744468688965\r\nStep 226, loss: 5.009485244750977\r\nStep 227, loss: 5.0005974769592285\r\nStep 228, loss: 4.991429328918457\r\nStep 229, loss: 4.982512950897217\r\nStep 230, loss: 4.973666667938232\r\nStep 231, loss: 4.964373588562012\r\nStep 232, loss: 4.955423831939697\r\nStep 233, loss: 4.946341514587402\r\nStep 234, loss: 4.937288761138916\r\nStep 235, loss: 4.928089618682861\r\nStep 236, loss: 4.918966293334961\r\nStep 237, loss: 4.909922122955322\r\n
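
The parameter counts above (tokenizer 37,989,616 + lam 19,349,312 + dynamics 583,168 = 57,922,096 total) are the kind of numbers produced by summing leaf sizes over each component's parameter pytree; a sketch under that assumption, not necessarily the script's exact code:

```python
# Sketch: per-component parameter counts by summing pytree leaf sizes.
import jax

def count_params(params) -> int:
    return sum(leaf.size for leaf in jax.tree_util.tree_leaves(params))

# counts = {name: count_params(p) for name, p in components.items()}
# counts["total"] = sum(counts.values())
```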

#661  t=2,066,019  TERMINAL  terminal_output
Step 238, loss: 4.900802135467529\r\nStep 239, loss: 4.891537666320801\r\nStep 240, loss: 4.882425308227539\r\nStep 241, loss: 4.8734660148620605\r\nStep 242, loss: 4.864076614379883\r\nStep 243, loss: 4.854644298553467\r\nStep 244, loss: 4.845699787139893\r\nStep 245, loss: 4.836607456207275\r\nStep 246, loss: 4.826932430267334\r\nStep 247, loss: 4.817652225494385\r\nStep 248, loss: 4.808806896209717\r\nStep 249, loss: 4.799525260925293\r\nStep 250, loss: 4.790092945098877\r\nStep 251, loss: 4.781068325042725\r\nStep 252, loss: 4.771703720092773\r\nStep 253, loss: 4.762415885925293\r\nStep 254, loss: 4.753512382507324\r\nStep 255, loss: 4.744155406951904\r\nStep 256, loss: 4.734983921051025\r\nStep 257, loss: 4.725468635559082\r\nStep 258, loss: 4.716409683227539\r\nStep 259, loss: 4.7068376541137695\r\nStep 260, loss: 4.697548866271973\r\nStep 261, loss: 4.688360691070557\r\nStep 262, loss: 4.67854118347168\r\nStep 263, loss: 4.6695637702941895\r\nStep 264, loss: 4.6601996421813965\r\nStep 265, loss: 4.650557518005371\r\nStep 266, loss: 4.641153335571289\r\nStep 267, loss: 4.631518840789795\r\nStep 268, loss: 4.622035980224609\r\nStep 269, loss: 4.612715244293213\r\nStep 270, loss: 4.6026291847229\r\nStep 271, loss: 4.593733787536621\r\nStep 272, loss: 4.584788799285889\r\nStep 273, loss: 4.574825763702393\r\nStep 274, loss: 4.565671443939209\r\nStep 275, loss: 4.556609153747559\r\nStep 276, loss: 4.546914100646973\r\nStep 277, loss: 4.53835391998291\r\nStep 278, loss: 4.528618812561035\r\nStep 279, loss: 4.519596099853516\r\nStep 280, loss: 4.51027774810791\r\nStep 281, loss: 4.500893592834473\r\nStep 282, loss: 4.4919047355651855\r\nStep 283, loss: 4.482218265533447\r\nStep 284, loss: 4.473264694213867\r\nStep 285, loss: 4.463583469390869\r\nStep 286, loss: 4.453956604003906\r\nStep 287, loss: 4.444716930389404\r\nStep 288, loss: 4.435055255889893\r\nStep 289, loss: 4.425739288330078\r\nStep 290, loss: 4.416041374206543\r\nStep 291, loss: 4.406640529632568\r\nStep 292, loss: 4.3968353271484375\r\nStep 293, loss: 4.386941909790039\r\nStep 294, loss: 4.377396583557129\r\nStep 295, loss: 4.367946624755859\r\nStep 296, loss: 4.358292579650879\r\nStep 297, loss: 4.348360061645508\r\nStep 298, loss: 4.338955402374268\r\nStep 299, loss: 4.3294243812561035\r\nStep 300, loss: 4.319789886474609\r\nStep 301, loss: 4.309905529022217\r\nStep 302, loss: 4.300123691558838\r\nStep 303, loss: 4.29052734375\r\nStep 304, loss: 4.280849456787109\r\nStep 305, loss: 4.270991802215576\r\nStep 306, loss: 4.261135578155518\r\nStep 307, loss: 4.251252174377441\r\nStep 308, loss: 4.241791725158691\r\nStep 309, loss: 4.232276439666748\r\nStep 310, loss: 4.223031520843506\r\nStep 311, loss: 4.2125654220581055\r\nStep 312, loss: 4.203210353851318\r\nStep 313, loss: 4.193575382232666\r\nStep 314, loss: 4.184154987335205\r\nStep 315, loss: 4.1741180419921875\r\nStep 316, loss: 4.164731025695801\r\nStep 317, loss: 4.1551690101623535\r\nStep 318, loss: 4.145143985748291\r\nStep 319, loss: 4.135638236999512\r\nStep 320, loss: 4.125460624694824\r\nStep 321, loss: 4.11622428894043\r\nStep 322, loss: 4.106347560882568\r\nStep 323, loss: 4.096459865570068\r\nStep 324, loss: 4.087124824523926\r\nStep 325, loss: 4.077770233154297\r\nStep 326, loss: 4.0677385330200195\r\nStep 327, loss: 4.058280944824219\r\nStep 328, loss: 4.048334121704102\r\nStep 329, loss: 4.038717746734619\r\nStep 330, loss: 4.02905797958374\r\nStep 331, loss: 4.01910924911499\r\nStep 332, loss: 4.009538650512695\r\nStep 333, loss: 
3.9999685287475586\r\nStep 334, loss: 3.990321159362793\r\nStep 335, loss: 3.9814372062683105\r\nStep 336, loss: 3.971301317214966\r\nStep 337, loss: 3.961608648300171\r\nStep 338, loss: 3.9528887271881104\r\nStep 339, loss: 3.9425196647644043\r\nStep 340, loss: 3.9333949089050293\r\nStep 341, loss: 3.9235424995422363\r\nStep 342, loss: 3.9139997959136963\r\nStep 343, loss: 3.9049596786499023\r\nStep 344, loss: 3.895594596862793\r\nStep 345, loss: 3.885329008102417\r\nStep 346, loss: 3.8764991760253906\r\nStep 347, loss: 3.8665578365325928\r\nStep 348, loss: 3.8574306964874268\r\nStep 349, loss: 3.847919225692749\r\nStep 350, loss: 3.8387179374694824\r\nStep 351, loss: 3.8287770748138428\r\nStep 352, loss: 3.8189685344696045\r\nStep 353, loss: 3.810070037841797\r\nStep 354, loss: 3.800123691558838\r\nStep 355, loss: 3.7902655601501465\r\nStep 356, loss: 3.780945301055908\r\nStep 357, loss: 3.7712900638580322\r\nStep 358, loss: 3.7613730430603027\r\nStep 359, loss: 3.751981019973755\r\nStep 360, loss: 3.7420685291290283\r\nStep 361, loss: 3.732534408569336\r\nStep 362, loss: 3.7226040363311768\r\nStep 363, loss: 3.7131166458129883\r\nStep 364, loss: 3.703486680984497\r\nStep 365, loss: 3.694000720977783\r\nStep 366, loss: 3.6844003200531006\r\nStep 367, loss: 3.673954725265503\r\nStep 368, loss: 3.6646928787231445\r\nStep 369, loss: 3.6550443172454834\r\nStep 370, loss: 3.6452085971832275\r\nStep 371, loss: 3.6361234188079834\r\nStep 372, loss: 3.626244306564331\r\nStep 373, loss: 3.616428852081299\r\nStep 374, loss: 3.6066994667053223\r\nStep 375, loss: 3.596869468688965\r\nStep 376, loss: 3.586979389190674\r\nStep 377, loss: 3.5772318840026855\r\nStep 378, loss: 3.5673375129699707\r\nStep 379, loss: 3.5579588413238525\r\nStep 380, loss: 3.547729253768921\r\nStep 381, loss: 3.5381970405578613\r\nStep 382, loss: 3.528395414352417\r\nStep 383, loss: 3.518439769744873\r\nStep 384, loss: 3.5089547634124756\r\nStep 385, loss: 3.5004587173461914\r\nStep 386, loss: 3.4898409843444824\r\nStep 387, loss: 3.4796433448791504\r\nStep 388, loss: 3.4710559844970703\r\nStep 389, loss: 3.4610753059387207\r\nStep 390, loss: 3.4516844749450684\r\nStep 391, loss: 3.4432244300842285\r\nStep 392, loss: 3.433161497116089\r\nStep 393, loss: 3.424074172973633\r\nStep 394, loss: 3.415163040161133\r\nStep 395, loss: 3.404837131500244\r\nStep 396, loss: 3.396036386489868\r\nStep 397, loss: 3.3865256309509277\r\nStep 398, loss: 3.377376079559326\r\nStep 399, loss: 3.3689520359039307\r\nStep 400, loss: 3.358548641204834\r\nStep 401, loss: 3.3500335216522217\r\nStep 402, loss: 3.3404457569122314\r\nStep 403, loss: 3.330622434616089\r\nStep 404, loss: 3.3219242095947266\r\nStep 405, loss: 3.312434673309326\r\nStep 406, loss: 3.3026907444000244\r\nStep 407, loss: 3.29374098777771\r\nStep 408, loss: 3.2837979793548584\r\nStep 409, loss: 3.2748804092407227\r\nStep 410, loss: 3.2652974128723145\r\nStep 411, loss: 3.2553582191467285\r\nStep 412, loss: 3.24627947807312\r\nStep 413, loss: 3.236337423324585\r\nStep 414, loss: 3.2268292903900146\r\nStep 415, loss: 3.217132091522217\r\nStep 416, loss: 3.2074100971221924\r\nStep 417, loss: 3.19848370552063\r\nStep 418, loss: 3.1885173320770264\r\nStep 419, loss: 3.1789989471435547\r\nStep 420, loss: 3.169036388397217\r\nStep 421, loss: 3.1593873500823975\r\nStep 422, loss: 3.150824546813965\r\nStep 423, loss: 3.1401848793029785\r\nStep 424, loss: 3.1305577754974365\r\nStep 425, loss: 3.1213154792785645\r\nStep 426, loss: 3.111396551132202\r\nStep 427, loss: 
3.1020102500915527\r\nStep 428, loss: 3.0921826362609863\r\nStep 429, loss: 3.083000659942627\r\nStep 430, loss: 3.072969675064087\r\nStep 431, loss: 3.0643012523651123\r\nStep 432, loss: 3.054401397705078\r\nStep 433, loss: 3.0447378158569336\r\nStep 434, loss: 3.0354220867156982\r\nStep 435, loss: 3.025641679763794\r\nStep 436, loss: 3.0168235301971436\r\nStep 437, loss: 3.0065646171569824\r\nStep 438, loss: 2.9979569911956787\r\nStep 439, loss: 2.9892468452453613\r\nStep 440, loss: 2.9791550636291504\r\nStep 441, loss: 2.969914674758911\r\nStep 442, loss: 2.960268974304199\r\nStep 443, loss: 2.95159649848938\r\nStep 444, loss: 2.9417500495910645\r\nStep 445, loss: 2.9327378273010254\r\nStep 446, loss: 2.9226551055908203\r\nStep 447, loss: 2.9139459133148193\r\nStep 448, loss: 2.9045276641845703\r\nStep 449, loss: 2.894792079925537\r\nStep 450, loss: 2.8859612941741943\r\nStep 451, loss: 2.8762094974517822\r\nStep 452, loss: 2.8667941093444824\r\nStep 453, loss: 2.8569629192352295\r\nStep 454, loss: 2.8479533195495605\r\nStep 455, loss: 2.838305950164795\r\nStep 456, loss: 2.829066038131714\r\nStep 457, loss: 2.819387197494507\r\nStep 458, loss: 2.8093886375427246\r\nStep 459, loss: 2.800187587738037\r\nStep 460, loss: 2.7901248931884766\r\nStep 461, loss: 2.780832290649414\r\nStep 462, loss: 2.7714550495147705\r\nStep 463, loss: 2.762202024459839\r\nStep 464, loss: 2.7535359859466553\r\nStep 465, loss: 2.744499444961548\r\nStep 466, loss: 2.733633518218994\r\nStep 467, loss: 2.7247514724731445\r\nStep 468, loss: 2.7162628173828125\r\nStep 469, loss: 2.7063894271850586\r\nStep 470, loss: 2.697354555130005\r\nStep 471, loss: 2.6882095336914062\r\nStep 472, loss: 2.678776979446411\r\nStep 473, loss: 2.6701743602752686\r\nStep 474, loss: 2.660641670227051\r\nStep 475, loss: 2.6510348320007324\r\n

#662  t=2,087,523  TERMINAL  terminal_output
Step 476, loss: 2.642510175704956\r\nStep 477, loss: 2.63261079788208\r\nStep 478, loss: 2.623910427093506\r\nStep 479, loss: 2.6148018836975098\r\nStep 480, loss: 2.6045711040496826\r\nStep 481, loss: 2.5958633422851562\r\nStep 482, loss: 2.5867292881011963\r\nStep 483, loss: 2.577667713165283\r\nStep 484, loss: 2.5676422119140625\r\nStep 485, loss: 2.5583274364471436\r\nStep 486, loss: 2.5490875244140625\r\nStep 487, loss: 2.5397307872772217\r\nStep 488, loss: 2.5303761959075928\r\nStep 489, loss: 2.5207362174987793\r\nStep 490, loss: 2.511890411376953\r\nStep 491, loss: 2.5031535625457764\r\nStep 492, loss: 2.4956369400024414\r\nStep 493, loss: 2.4856512546539307\r\nStep 494, loss: 2.475015878677368\r\nStep 495, loss: 2.467982769012451\r\nStep 496, loss: 2.4585630893707275\r\nStep 497, loss: 2.4493839740753174\r\nStep 498, loss: 2.4403648376464844\r\nStep 499, loss: 2.4309475421905518\r\nStep 500, loss: 2.422694683074951\r\nStep 501, loss: 2.4136173725128174\r\nStep 502, loss: 2.4044888019561768\r\nStep 503, loss: 2.395998001098633\r\nStep 504, loss: 2.3866400718688965\r\nStep 505, loss: 2.3780908584594727\r\nStep 506, loss: 2.3690168857574463\r\nStep 507, loss: 2.3596842288970947\r\nStep 508, loss: 2.351391077041626\r\nStep 509, loss: 2.341170072555542\r\nStep 510, loss: 2.3332841396331787\r\nStep 511, loss: 2.3240034580230713\r\nStep 512, loss: 2.315258264541626\r\nStep 513, loss: 2.3064169883728027\r\nStep 514, loss: 2.2964682579040527\r\nStep 515, loss: 2.2885804176330566\r\nStep 516, loss: 2.2798867225646973\r\nStep 517, loss: 2.271151542663574\r\nStep 518, loss: 2.261836528778076\r\nStep 519, loss: 2.2530510425567627\r\nStep 520, loss: 2.2438204288482666\r\nStep 521, loss: 2.2360198497772217\r\nStep 522, loss: 2.2259392738342285\r\nStep 523, loss: 2.21750807762146\r\nStep 524, loss: 2.2093539237976074\r\nStep 525, loss: 2.1993319988250732\r\nStep 526, loss: 2.191265821456909\r\nStep 527, loss: 2.18232798576355\r\nStep 528, loss: 2.17352557182312\r\nStep 529, loss: 2.1645965576171875\r\nStep 530, loss: 2.155613899230957\r\nStep 531, loss: 2.1466257572174072\r\nStep 532, loss: 2.137315034866333\r\nStep 533, loss: 2.1294357776641846\r\nStep 534, loss: 2.119992256164551\r\nStep 535, loss: 2.1128017902374268\r\nStep 536, loss: 2.104088306427002\r\nStep 537, loss: 2.094069242477417\r\nStep 538, loss: 2.085700750350952\r\nStep 539, loss: 2.0780234336853027\r\nStep 540, loss: 2.0682499408721924\r\nStep 541, loss: 2.060723066329956\r\nStep 542, loss: 2.051548719406128\r\nStep 543, loss: 2.0432472229003906\r\nStep 544, loss: 2.034942865371704\r\nStep 545, loss: 2.0253207683563232\r\nStep 546, loss: 2.018012762069702\r\nStep 547, loss: 2.009068727493286\r\nStep 548, loss: 2.00166916847229\r\nStep 549, loss: 1.9926462173461914\r\nStep 550, loss: 1.9838670492172241\r\nStep 551, loss: 1.9753228425979614\r\nStep 552, loss: 1.9683812856674194\r\nStep 553, loss: 1.9595160484313965\r\nStep 554, loss: 1.9515299797058105\r\nStep 555, loss: 1.9421453475952148\r\nStep 556, loss: 1.9354835748672485\r\nStep 557, loss: 1.9266985654830933\r\nStep 558, loss: 1.9188404083251953\r\nStep 559, loss: 1.9097076654434204\r\nStep 560, loss: 1.9026893377304077\r\nStep 561, loss: 1.8926920890808105\r\nStep 562, loss: 1.8853564262390137\r\nStep 563, loss: 1.8766025304794312\r\nStep 564, loss: 1.8697885274887085\r\nStep 565, loss: 1.8593159914016724\r\nStep 566, loss: 1.8541855812072754\r\nStep 567, loss: 1.845725655555725\r\nStep 568, loss: 1.8373762369155884\r\nStep 569, loss: 1.8290959596633911\r\nStep 570, loss: 
1.8231853246688843\r\nStep 571, loss: 1.8120434284210205\r\nStep 572, loss: 1.806404948234558\r\nStep 573, loss: 1.797046184539795\r\nStep 574, loss: 1.789894938468933\r\nStep 575, loss: 1.7818406820297241\r\nStep 576, loss: 1.7736821174621582\r\nStep 577, loss: 1.7659478187561035\r\nStep 578, loss: 1.7581696510314941\r\nStep 579, loss: 1.749375581741333\r\nStep 580, loss: 1.7428193092346191\r\nStep 581, loss: 1.733798861503601\r\nStep 582, loss: 1.7262483835220337\r\nStep 583, loss: 1.7176529169082642\r\nStep 584, loss: 1.7106640338897705\r\nStep 585, loss: 1.7031172513961792\r\nStep 586, loss: 1.6962319612503052\r\nStep 587, loss: 1.6861025094985962\r\nStep 588, loss: 1.6786580085754395\r\nStep 589, loss: 1.6713743209838867\r\nStep 590, loss: 1.6627628803253174\r\nStep 591, loss: 1.6550942659378052\r\nStep 592, loss: 1.6463648080825806\r\nStep 593, loss: 1.638795256614685\r\nStep 594, loss: 1.6306997537612915\r\nStep 595, loss: 1.6227972507476807\r\nStep 596, loss: 1.6149439811706543\r\nStep 597, loss: 1.6068694591522217\r\nStep 598, loss: 1.59817636013031\r\nStep 599, loss: 1.5905296802520752\r\nStep 600, loss: 1.5824655294418335\r\nStep 601, loss: 1.5744991302490234\r\nStep 602, loss: 1.5656940937042236\r\nStep 603, loss: 1.5582789182662964\r\nStep 604, loss: 1.5507293939590454\r\nStep 605, loss: 1.5437891483306885\r\nStep 606, loss: 1.5346840620040894\r\nStep 607, loss: 1.527217149734497\r\nStep 608, loss: 1.5205581188201904\r\nStep 609, loss: 1.5107218027114868\r\nStep 610, loss: 1.5037164688110352\r\nStep 611, loss: 1.4973241090774536\r\nStep 612, loss: 1.4878110885620117\r\nStep 613, loss: 1.480774164199829\r\nStep 614, loss: 1.4735666513442993\r\nStep 615, loss: 1.464660882949829\r\nStep 616, loss: 1.4574377536773682\r\nStep 617, loss: 1.4505798816680908\r\nStep 618, loss: 1.4418940544128418\r\nStep 619, loss: 1.4341411590576172\r\nStep 620, loss: 1.426429271697998\r\nStep 621, loss: 1.418775200843811\r\nStep 622, loss: 1.4114100933074951\r\nStep 623, loss: 1.403503656387329\r\nStep 624, loss: 1.3958673477172852\r\nStep 625, loss: 1.3888076543807983\r\nStep 626, loss: 1.3819773197174072\r\nStep 627, loss: 1.3730348348617554\r\nStep 628, loss: 1.3648275136947632\r\nStep 629, loss: 1.3591113090515137\r\nStep 630, loss: 1.3514671325683594\r\nStep 631, loss: 1.3429616689682007\r\nStep 632, loss: 1.336741328239441\r\nStep 633, loss: 1.3287346363067627\r\nStep 634, loss: 1.3211055994033813\r\nStep 635, loss: 1.3139384984970093\r\nStep 636, loss: 1.3068573474884033\r\nStep 637, loss: 1.2987180948257446\r\nStep 638, loss: 1.2922712564468384\r\nStep 639, loss: 1.2857189178466797\r\nStep 640, loss: 1.277501106262207\r\nStep 641, loss: 1.2692525386810303\r\nStep 642, loss: 1.2645974159240723\r\nStep 643, loss: 1.2561060190200806\r\nStep 644, loss: 1.2487590312957764\r\nStep 645, loss: 1.2429131269454956\r\nStep 646, loss: 1.2351661920547485\r\nStep 647, loss: 1.227660059928894\r\nStep 648, loss: 1.2207943201065063\r\nStep 649, loss: 1.2160868644714355\r\nStep 650, loss: 1.2066915035247803\r\nStep 651, loss: 1.2000283002853394\r\nStep 652, loss: 1.1937106847763062\r\nStep 653, loss: 1.185746669769287\r\nStep 654, loss: 1.1797198057174683\r\nStep 655, loss: 1.17323899269104\r\nStep 656, loss: 1.1656289100646973\r\nStep 657, loss: 1.1582473516464233\r\nStep 658, loss: 1.1523079872131348\r\nStep 659, loss: 1.145919919013977\r\nStep 660, loss: 1.1396456956863403\r\nStep 661, loss: 1.1327812671661377\r\nStep 662, loss: 1.1252888441085815\r\nStep 663, loss: 1.1179755926132202\r\nStep 664, loss: 
1.113598108291626\r\nStep 665, loss: 1.1053816080093384\r\nStep 666, loss: 1.0982372760772705\r\nStep 667, loss: 1.0929498672485352\r\nStep 668, loss: 1.0850220918655396\r\nStep 669, loss: 1.0782496929168701\r\nStep 670, loss: 1.0732945203781128\r\nStep 671, loss: 1.0655406713485718\r\nStep 672, loss: 1.0581672191619873\r\nStep 673, loss: 1.0524147748947144\r\nStep 674, loss: 1.0455681085586548\r\nStep 675, loss: 1.039129376411438\r\nStep 676, loss: 1.0331910848617554\r\nStep 677, loss: 1.0269386768341064\r\nStep 678, loss: 1.0203137397766113\r\nStep 679, loss: 1.0132237672805786\r\nStep 680, loss: 1.007102370262146\r\nStep 681, loss: 0.9999140501022339\r\nStep 682, loss: 0.9938558340072632\r\nStep 683, loss: 0.9873496294021606\r\nStep 684, loss: 0.9803993701934814\r\nStep 685, loss: 0.9749471545219421\r\nStep 686, loss: 0.96750408411026\r\nStep 687, loss: 0.9635863900184631\r\nStep 688, loss: 0.9571389555931091\r\nStep 689, loss: 0.9535318613052368\r\nStep 690, loss: 0.944150984287262\r\nStep 691, loss: 0.9393853545188904\r\nStep 692, loss: 0.9311679601669312\r\nStep 693, loss: 0.9260869026184082\r\nStep 694, loss: 0.9199948310852051\r\nStep 695, loss: 0.913409411907196\r\nStep 696, loss: 0.9085953235626221\r\nStep 697, loss: 0.9009783267974854\r\nStep 698, loss: 0.8970188498497009\r\nStep 699, loss: 0.8895894885063171\r\nStep 700, loss: 0.8855931162834167\r\nStep 701, loss: 0.8806162476539612\r\nStep 702, loss: 0.8717113733291626\r\nStep 703, loss: 0.8685153722763062\r\nStep 704, loss: 0.8619484305381775\r\nStep 705, loss: 0.8548940420150757\r\nStep 706, loss: 0.8508508801460266\r\nStep 707, loss: 0.8445050716400146\r\nStep 708, loss: 0.8392189145088196\r\nStep 709, loss: 0.8335049748420715\r\nStep 710, loss: 0.8282254338264465\r\n
null
terminal_output
663
2,108,217
TERMINAL
0
0
Step 711, loss: 0.8217080235481262\r\nStep 712, loss: 0.8165726661682129\r\nStep 713, loss: 0.8109076023101807\r\nStep 714, loss: 0.8044532537460327\r\nStep 715, loss: 0.7988839745521545\r\nStep 716, loss: 0.7937924265861511\r\nStep 717, loss: 0.7878305912017822\r\nStep 718, loss: 0.7830884456634521\r\nStep 719, loss: 0.7775300145149231\r\nStep 720, loss: 0.7719880938529968\r\nStep 721, loss: 0.7663823366165161\r\nStep 722, loss: 0.7605624794960022\r\nStep 723, loss: 0.7544004917144775\r\nStep 724, loss: 0.7499596476554871\r\nStep 725, loss: 0.7449225187301636\r\nStep 726, loss: 0.7408409118652344\r\nStep 727, loss: 0.7363877296447754\r\nStep 728, loss: 0.7284979224205017\r\nStep 729, loss: 0.7239687442779541\r\nStep 730, loss: 0.7183437943458557\r\nStep 731, loss: 0.7137470841407776\r\nStep 732, loss: 0.7076718807220459\r\nStep 733, loss: 0.7015669941902161\r\nStep 734, loss: 0.6976932287216187\r\nStep 735, loss: 0.6920550465583801\r\nStep 736, loss: 0.6869915723800659\r\nStep 737, loss: 0.6808256506919861\r\nStep 738, loss: 0.6767962574958801\r\nStep 739, loss: 0.6717007756233215\r\nStep 740, loss: 0.6665679812431335\r\nStep 741, loss: 0.6612553000450134\r\nStep 742, loss: 0.6563907861709595\r\nStep 743, loss: 0.6511380076408386\r\nStep 744, loss: 0.6466406583786011\r\nStep 745, loss: 0.6437736749649048\r\nStep 746, loss: 0.6367356777191162\r\nStep 747, loss: 0.6305527091026306\r\nStep 748, loss: 0.6266058683395386\r\nStep 749, loss: 0.6217530965805054\r\nStep 750, loss: 0.6167359352111816\r\nStep 751, loss: 0.6124680042266846\r\nStep 752, loss: 0.6066867709159851\r\nStep 753, loss: 0.6020199656486511\r\nStep 754, loss: 0.5965709090232849\r\nStep 755, loss: 0.592445969581604\r\nStep 756, loss: 0.5881512761116028\r\nStep 757, loss: 0.5836131572723389\r\nStep 758, loss: 0.5787907242774963\r\nStep 759, loss: 0.573790431022644\r\nStep 760, loss: 0.5693543553352356\r\nStep 761, loss: 0.5681652426719666\r\nStep 762, loss: 0.5674947500228882\r\nStep 763, loss: 0.5567172169685364\r\nStep 764, loss: 0.5575106739997864\r\nStep 765, loss: 0.548179566860199\r\nStep 766, loss: 0.5479695200920105\r\nStep 767, loss: 0.5398564338684082\r\nStep 768, loss: 0.5381828546524048\r\nStep 769, loss: 0.5311191082000732\r\nStep 770, loss: 0.5292914509773254\r\nStep 771, loss: 0.5232307314872742\r\nStep 772, loss: 0.520074188709259\r\nStep 773, loss: 0.5156713128089905\r\nStep 774, loss: 0.511364221572876\r\nStep 775, loss: 0.5070962309837341\r\nStep 776, loss: 0.502781867980957\r\nStep 777, loss: 0.4995824098587036\r\nStep 778, loss: 0.4945211410522461\r\nStep 779, loss: 0.4922212064266205\r\nStep 780, loss: 0.4862660765647888\r\nStep 781, loss: 0.48393452167510986\r\nStep 782, loss: 0.4794307351112366\r\nStep 783, loss: 0.4755706787109375\r\nStep 784, loss: 0.47356724739074707\r\nStep 785, loss: 0.46796754002571106\r\nStep 786, loss: 0.46420928835868835\r\nStep 787, loss: 0.4597548246383667\r\nStep 788, loss: 0.45774367451667786\r\nStep 789, loss: 0.4530315697193146\r\nStep 790, loss: 0.44892415404319763\r\nStep 791, loss: 0.44450843334198\r\nStep 792, loss: 0.44253072142601013\r\nStep 793, loss: 0.4387081563472748\r\nStep 794, loss: 0.43446189165115356\r\nStep 795, loss: 0.4304324984550476\r\nStep 796, loss: 0.42715081572532654\r\nStep 797, loss: 0.42237555980682373\r\nStep 798, loss: 0.41951921582221985\r\nStep 799, loss: 0.4148396849632263\r\nStep 800, loss: 0.4121858477592468\r\nStep 801, loss: 0.4088435769081116\r\nStep 802, loss: 0.4042735695838928\r\nStep 803, loss: 0.4010925889015198\r\nStep 804, loss: 
0.3982788026332855\r\nStep 805, loss: 0.3947482705116272\r\nStep 806, loss: 0.3918812870979309\r\nStep 807, loss: 0.3894217908382416\r\nStep 808, loss: 0.3847247362136841\r\nStep 809, loss: 0.3802025020122528\r\nStep 810, loss: 0.37989234924316406\r\nStep 811, loss: 0.37599483132362366\r\nStep 812, loss: 0.37114477157592773\r\nStep 813, loss: 0.3696782886981964\r\nStep 814, loss: 0.3665175437927246\r\nStep 815, loss: 0.36217549443244934\r\nStep 816, loss: 0.35946720838546753\r\nStep 817, loss: 0.35535821318626404\r\nStep 818, loss: 0.3539041578769684\r\nStep 819, loss: 0.3504786491394043\r\nStep 820, loss: 0.3458513915538788\r\nStep 821, loss: 0.345105916261673\r\nStep 822, loss: 0.33974865078926086\r\nStep 823, loss: 0.33818066120147705\r\nStep 824, loss: 0.33452001214027405\r\nStep 825, loss: 0.332203209400177\r\nStep 826, loss: 0.3284471333026886\r\nStep 827, loss: 0.32525530457496643\r\nStep 828, loss: 0.32212960720062256\r\nStep 829, loss: 0.32014429569244385\r\nStep 830, loss: 0.3170332610607147\r\nStep 831, loss: 0.3145029544830322\r\nStep 832, loss: 0.3122105896472931\r\nStep 833, loss: 0.3082149922847748\r\nStep 834, loss: 0.3050556778907776\r\nStep 835, loss: 0.3033985495567322\r\nStep 836, loss: 0.299150675535202\r\nStep 837, loss: 0.29744651913642883\r\nStep 838, loss: 0.294050008058548\r\nStep 839, loss: 0.29091981053352356\r\nStep 840, loss: 0.2886221408843994\r\nStep 841, loss: 0.28577083349227905\r\nStep 842, loss: 0.2835491895675659\r\nStep 843, loss: 0.28013232350349426\r\nStep 844, loss: 0.27792537212371826\r\nStep 845, loss: 0.2764488160610199\r\nStep 846, loss: 0.2730977237224579\r\nStep 847, loss: 0.2694430947303772\r\nStep 848, loss: 0.2672766447067261\r\nStep 849, loss: 0.2666090726852417\r\nStep 850, loss: 0.2649845778942108\r\nStep 851, loss: 0.2593952715396881\r\nStep 852, loss: 0.2584804892539978\r\nStep 853, loss: 0.25723397731781006\r\nStep 854, loss: 0.25188279151916504\r\nStep 855, loss: 0.2507297694683075\r\nStep 856, loss: 0.24848595261573792\r\nStep 857, loss: 0.24454468488693237\r\nStep 858, loss: 0.24419398605823517\r\nStep 859, loss: 0.2412576675415039\r\nStep 860, loss: 0.23829694092273712\r\nStep 861, loss: 0.2364361435174942\r\nStep 862, loss: 0.2335495948791504\r\nStep 863, loss: 0.2317638099193573\r\nStep 864, loss: 0.22863593697547913\r\nStep 865, loss: 0.22783450782299042\r\nStep 866, loss: 0.22513161599636078\r\nStep 867, loss: 0.2226179987192154\r\nStep 868, loss: 0.22165381908416748\r\nStep 869, loss: 0.2177145928144455\r\nStep 870, loss: 0.21694634854793549\r\nStep 871, loss: 0.21358415484428406\r\nStep 872, loss: 0.21330036222934723\r\nStep 873, loss: 0.20985034108161926\r\nStep 874, loss: 0.20798063278198242\r\nStep 875, loss: 0.20720070600509644\r\nStep 876, loss: 0.20394498109817505\r\nStep 877, loss: 0.20213158428668976\r\nStep 878, loss: 0.20161831378936768\r\nStep 879, loss: 0.19893546402454376\r\nStep 880, loss: 0.19689379632472992\r\nStep 881, loss: 0.19605840742588043\r\nStep 882, loss: 0.19173574447631836\r\nStep 883, loss: 0.19107890129089355\r\nStep 884, loss: 0.18824002146720886\r\nStep 885, loss: 0.18659928441047668\r\nStep 886, loss: 0.18426238000392914\r\nStep 887, loss: 0.1828402429819107\r\nStep 888, loss: 0.18153713643550873\r\nStep 889, loss: 0.18004541099071503\r\nStep 890, loss: 0.17923569679260254\r\nStep 891, loss: 0.1771780252456665\r\nStep 892, loss: 0.1737934798002243\r\nStep 893, loss: 0.1738079935312271\r\nStep 894, loss: 0.17082442343235016\r\nStep 895, loss: 0.16923661530017853\r\nStep 896, loss: 
0.16794100403785706\r\nStep 897, loss: 0.16499879956245422\r\nStep 898, loss: 0.1646275371313095\r\nStep 899, loss: 0.1617223024368286\r\nStep 900, loss: 0.1601634919643402\r\nStep 901, loss: 0.15855005383491516\r\nStep 902, loss: 0.15682610869407654\r\nStep 903, loss: 0.15482500195503235\r\nStep 904, loss: 0.15370941162109375\r\nStep 905, loss: 0.15198446810245514\r\nStep 906, loss: 0.1504742056131363\r\nStep 907, loss: 0.14852607250213623\r\nStep 908, loss: 0.14718694984912872\r\nStep 909, loss: 0.1457008719444275\r\nStep 910, loss: 0.14497193694114685\r\nStep 911, loss: 0.14276361465454102\r\nStep 912, loss: 0.1421838253736496\r\nStep 913, loss: 0.14211419224739075\r\nStep 914, loss: 0.1396927535533905\r\nStep 915, loss: 0.13664153218269348\r\nStep 916, loss: 0.13640844821929932\r\nStep 917, loss: 0.1351398080587387\r\nStep 918, loss: 0.13214509189128876\r\nStep 919, loss: 0.13230617344379425\r\nStep 920, loss: 0.12964898347854614\r\nStep 921, loss: 0.12912912666797638\r\nStep 922, loss: 0.127651646733284\r\nStep 923, loss: 0.12577399611473083\r\nStep 924, loss: 0.12553353607654572\r\nStep 925, loss: 0.12443098425865173\r\nStep 926, loss: 0.12249577045440674\r\nStep 927, loss: 0.12106511741876602\r\nStep 928, loss: 0.11977630853652954\r\nStep 929, loss: 0.11902184039354324\r\nStep 930, loss: 0.11860445886850357\r\nStep 931, loss: 0.11585672944784164\r\nStep 932, loss: 0.1161954253911972\r\nStep 933, loss: 0.11476494371891022\r\nStep 934, loss: 0.11214955896139145\r\nStep 935, loss: 0.11294207721948624\r\nStep 936, loss: 0.11000564694404602\r\nStep 937, loss: 0.10926644504070282\r\nStep 938, loss: 0.10795841366052628\r\nStep 939, loss: 0.10669568181037903\r\nStep 940, loss: 0.10649319738149643\r\nStep 941, loss: 0.10473640263080597\r\n
null
terminal_output
664
2,131,932
TERMINAL
0
0
Step 942, loss: 0.10284058004617691\r\nStep 943, loss: 0.10260770469903946\r\nStep 944, loss: 0.10058261454105377\r\nStep 945, loss: 0.10010762512683868\r\nStep 946, loss: 0.0989920049905777\r\nStep 947, loss: 0.09747622162103653\r\nStep 948, loss: 0.09700138121843338\r\nStep 949, loss: 0.09548023343086243\r\nStep 950, loss: 0.09451813995838165\r\nStep 951, loss: 0.09380467981100082\r\nStep 952, loss: 0.09238416701555252\r\nStep 953, loss: 0.09181223064661026\r\nStep 954, loss: 0.09069087356328964\r\nStep 955, loss: 0.0897594466805458\r\nStep 956, loss: 0.08954257518053055\r\nStep 957, loss: 0.0886593833565712\r\nStep 958, loss: 0.08869573473930359\r\nStep 959, loss: 0.08610638976097107\r\nStep 960, loss: 0.08560013025999069\r\nStep 961, loss: 0.0849563330411911\r\nStep 962, loss: 0.08312344551086426\r\nStep 963, loss: 0.08294418454170227\r\nStep 964, loss: 0.08196399360895157\r\nStep 965, loss: 0.0806659683585167\r\nStep 966, loss: 0.0800086110830307\r\nStep 967, loss: 0.07857557386159897\r\nStep 968, loss: 0.07892174273729324\r\nStep 969, loss: 0.07794471085071564\r\nStep 970, loss: 0.0764935091137886\r\nStep 971, loss: 0.075522281229496\r\nStep 972, loss: 0.07479660958051682\r\nStep 973, loss: 0.07404129952192307\r\nStep 974, loss: 0.07297063618898392\r\nStep 975, loss: 0.07214688509702682\r\nStep 976, loss: 0.07157693058252335\r\nStep 977, loss: 0.07074932008981705\r\nStep 978, loss: 0.06959939002990723\r\nStep 979, loss: 0.06938436627388\r\nStep 980, loss: 0.06846806406974792\r\nStep 981, loss: 0.06839611381292343\r\nStep 982, loss: 0.06689736247062683\r\nStep 983, loss: 0.06656885892152786\r\nStep 984, loss: 0.06565678119659424\r\nStep 985, loss: 0.06511901319026947\r\nStep 986, loss: 0.06462135910987854\r\nStep 987, loss: 0.06356603652238846\r\nStep 988, loss: 0.06287366896867752\r\nStep 989, loss: 0.062155965715646744\r\nStep 990, loss: 0.061441995203495026\r\nStep 991, loss: 0.060619451105594635\r\nStep 992, loss: 0.060587573796510696\r\nStep 993, loss: 0.05972093343734741\r\nStep 994, loss: 0.05868172645568848\r\nStep 995, loss: 0.058283090591430664\r\nStep 996, loss: 0.057746026664972305\r\nStep 997, loss: 0.05699358880519867\r\nStep 998, loss: 0.05653215944766998\r\nStep 999, loss: 0.055758606642484665\r\nSaved checkpoint at step 1000\r\nStep 1000, loss: 0.05501093342900276\r\nStep 1001, loss: 0.05454491823911667\r\nStep 1002, loss: 0.05387122556567192\r\nStep 1003, loss: 0.053106024861335754\r\nStep 1004, loss: 0.05228279531002045\r\nStep 1005, loss: 0.05187518522143364\r\nStep 1006, loss: 0.05169035866856575\r\nStep 1007, loss: 0.05150993540883064\r\nStep 1008, loss: 0.05034927651286125\r\nStep 1009, loss: 0.04985934495925903\r\nStep 1010, loss: 0.04929310828447342\r\nStep 1011, loss: 0.049011312425136566\r\nStep 1012, loss: 0.04900951310992241\r\nStep 1013, loss: 0.04826220124959946\r\nStep 1014, loss: 0.047129660844802856\r\nStep 1015, loss: 0.04729390889406204\r\nStep 1016, loss: 0.04632221534848213\r\nStep 1017, loss: 0.04612678661942482\r\nStep 1018, loss: 0.04689944535493851\r\nStep 1019, loss: 0.044806718826293945\r\nStep 1020, loss: 0.045104559510946274\r\nStep 1021, loss: 0.04412100091576576\r\nStep 1022, loss: 0.044060006737709045\r\nStep 1023, loss: 0.04344552382826805\r\nStep 1024, loss: 0.04304246976971626\r\nStep 1025, loss: 0.04260677471756935\r\nStep 1026, loss: 0.04195418953895569\r\nStep 1027, loss: 0.04113117605447769\r\nStep 1028, loss: 0.0413922481238842\r\nStep 1029, loss: 0.04028627648949623\r\nStep 1030, loss: 0.04066810756921768\r\nStep 1031, loss: 
0.0395963191986084\r\nStep 1032, loss: 0.03956642374396324\r\nStep 1033, loss: 0.03864578530192375\r\nStep 1034, loss: 0.038639992475509644\r\nStep 1035, loss: 0.03762561455368996\r\nStep 1036, loss: 0.037670981138944626\r\nStep 1037, loss: 0.037099629640579224\r\nStep 1038, loss: 0.0365331806242466\r\nStep 1039, loss: 0.036564022302627563\r\nStep 1040, loss: 0.0359191820025444\r\nStep 1041, loss: 0.0356856994330883\r\nStep 1042, loss: 0.03516722097992897\r\nStep 1043, loss: 0.03471212089061737\r\nStep 1044, loss: 0.03424248844385147\r\nStep 1045, loss: 0.034030936658382416\r\nStep 1046, loss: 0.03368755802512169\r\nStep 1047, loss: 0.03346807137131691\r\nStep 1048, loss: 0.03269940987229347\r\nStep 1049, loss: 0.03244371712207794\r\nStep 1050, loss: 0.03215942531824112\r\nStep 1051, loss: 0.031605303287506104\r\nStep 1052, loss: 0.03141744062304497\r\nStep 1053, loss: 0.03101503849029541\r\nStep 1054, loss: 0.03085830807685852\r\nStep 1055, loss: 0.0305185467004776\r\nStep 1056, loss: 0.029711099341511726\r\nStep 1057, loss: 0.029728597030043602\r\nStep 1058, loss: 0.029517455026507378\r\nStep 1059, loss: 0.029007628560066223\r\nStep 1060, loss: 0.02880880795419216\r\nStep 1061, loss: 0.02873934805393219\r\nStep 1062, loss: 0.027836868539452553\r\nStep 1063, loss: 0.02787143364548683\r\nStep 1064, loss: 0.0275528896600008\r\nStep 1065, loss: 0.026962384581565857\r\nStep 1066, loss: 0.027164041996002197\r\nStep 1067, loss: 0.026582540944218636\r\nStep 1068, loss: 0.02607882395386696\r\nStep 1069, loss: 0.02598561905324459\r\nStep 1070, loss: 0.02558951824903488\r\nStep 1071, loss: 0.02551584504544735\r\nStep 1072, loss: 0.025129931047558784\r\nStep 1073, loss: 0.024805178865790367\r\nStep 1074, loss: 0.024721326306462288\r\nStep 1075, loss: 0.024159882217645645\r\nStep 1076, loss: 0.023891637101769447\r\nStep 1077, loss: 0.023627188056707382\r\nStep 1078, loss: 0.023391667753458023\r\nStep 1079, loss: 0.023272866383194923\r\nStep 1080, loss: 0.023029927164316177\r\nStep 1081, loss: 0.02272222936153412\r\nStep 1082, loss: 0.022336535155773163\r\nStep 1083, loss: 0.02218504808843136\r\nStep 1084, loss: 0.022072169929742813\r\nStep 1085, loss: 0.02167431265115738\r\nStep 1086, loss: 0.02119705080986023\r\nStep 1087, loss: 0.021123964339494705\r\nStep 1088, loss: 0.02075539343059063\r\nStep 1089, loss: 0.02061818726360798\r\nStep 1090, loss: 0.02034122124314308\r\nStep 1091, loss: 0.020141519606113434\r\nStep 1092, loss: 0.019736608490347862\r\nStep 1093, loss: 0.019700797274708748\r\nStep 1094, loss: 0.019596993923187256\r\nStep 1095, loss: 0.019521242007613182\r\nStep 1096, loss: 0.01907781884074211\r\nStep 1097, loss: 0.01864895038306713\r\nStep 1098, loss: 0.018773796036839485\r\nStep 1099, loss: 0.01880393549799919\r\nStep 1100, loss: 0.0186154805123806\r\nStep 1101, loss: 0.018351327627897263\r\nStep 1102, loss: 0.018004044890403748\r\nStep 1103, loss: 0.017644882202148438\r\nStep 1104, loss: 0.017551574856042862\r\nStep 1105, loss: 0.01723521016538143\r\nStep 1106, loss: 0.017335789278149605\r\nStep 1107, loss: 0.016952281817793846\r\nStep 1108, loss: 0.01662817783653736\r\nStep 1109, loss: 0.016695033758878708\r\nStep 1110, loss: 0.016334369778633118\r\nStep 1111, loss: 0.016277842223644257\r\nStep 1112, loss: 0.016116367653012276\r\nStep 1113, loss: 0.015882331877946854\r\nStep 1114, loss: 0.015700487419962883\r\nStep 1115, loss: 0.015370768494904041\r\nStep 1116, loss: 0.015415767207741737\r\nStep 1117, loss: 0.015184538438916206\r\nStep 1118, loss: 0.015052331611514091\r\nStep 1119, 
loss: 0.014788689091801643\r\nStep 1120, loss: 0.014641924761235714\r\nStep 1121, loss: 0.014552359469234943\r\nStep 1122, loss: 0.014238058589398861\r\nStep 1123, loss: 0.01421881653368473\r\nStep 1124, loss: 0.014196573756635189\r\nStep 1125, loss: 0.013915947638452053\r\nStep 1126, loss: 0.014101726934313774\r\nStep 1127, loss: 0.013905644416809082\r\nStep 1128, loss: 0.013702526688575745\r\nStep 1129, loss: 0.013285263441503048\r\nStep 1130, loss: 0.013291345909237862\r\nStep 1131, loss: 0.013438103720545769\r\nStep 1132, loss: 0.012999753467738628\r\nStep 1133, loss: 0.012816538102924824\r\nStep 1134, loss: 0.012905183248221874\r\nStep 1135, loss: 0.012812134809792042\r\nStep 1136, loss: 0.012356710620224476\r\nStep 1137, loss: 0.01244242675602436\r\nStep 1138, loss: 0.01218419335782528\r\nStep 1139, loss: 0.011963708326220512\r\nStep 1140, loss: 0.012156236916780472\r\nStep 1141, loss: 0.011928506195545197\r\nStep 1142, loss: 0.011556784622371197\r\nStep 1143, loss: 0.011564714834094048\r\nStep 1144, loss: 0.011371411383152008\r\nStep 1145, loss: 0.011371642351150513\r\nStep 1146, loss: 0.01133374497294426\r\nStep 1147, loss: 0.01096455566585064\r\nStep 1148, loss: 0.011113219894468784\r\nStep 1149, loss: 0.01120865810662508\r\nStep 1150, loss: 0.010651581920683384\r\nStep 1151, loss: 0.010794384405016899\r\nStep 1152, loss: 0.01051724050194025\r\nStep 1153, loss: 0.010538892820477486\r\nStep 1154, loss: 0.010571874678134918\r\nStep 1155, loss: 0.010171287693083286\r\nStep 1156, loss: 0.01024056226015091\r\nStep 1157, loss: 0.009947581216692924\r\nStep 1158, loss: 0.010074986144900322\r\nStep 1159, loss: 0.009830509312450886\r\nStep 1160, loss: 0.00983112957328558\r\nStep 1161, loss: 0.00969394389539957\r\n
null
terminal_output
665
2,151,515
TERMINAL
0
0
Step 1162, loss: 0.009393114596605301\r\nStep 1163, loss: 0.009469496086239815\r\nStep 1164, loss: 0.009306042455136776\r\nStep 1165, loss: 0.009130761958658695\r\nStep 1166, loss: 0.009153819642961025\r\nStep 1167, loss: 0.009008807130157948\r\nStep 1168, loss: 0.008845384232699871\r\nStep 1169, loss: 0.008844839408993721\r\nStep 1170, loss: 0.008735044859349728\r\nStep 1171, loss: 0.008685932494699955\r\nStep 1172, loss: 0.00862586498260498\r\nStep 1173, loss: 0.0084147397428751\r\nStep 1174, loss: 0.008601149544119835\r\nStep 1175, loss: 0.00881489459425211\r\nStep 1176, loss: 0.008508187718689442\r\nStep 1177, loss: 0.008219584822654724\r\nStep 1178, loss: 0.008381807245314121\r\nStep 1179, loss: 0.007943643257021904\r\nStep 1180, loss: 0.008319320157170296\r\nStep 1181, loss: 0.007897480390965939\r\nStep 1182, loss: 0.007939528673887253\r\nStep 1183, loss: 0.0077130598947405815\r\nStep 1184, loss: 0.00789724476635456\r\nStep 1185, loss: 0.007549331057816744\r\nStep 1186, loss: 0.00778029253706336\r\nStep 1187, loss: 0.007546393200755119\r\nStep 1188, loss: 0.007547583431005478\r\nStep 1189, loss: 0.007277408614754677\r\nStep 1190, loss: 0.007431786973029375\r\nStep 1191, loss: 0.007194864097982645\r\nStep 1192, loss: 0.007268082816153765\r\nStep 1193, loss: 0.00717291422188282\r\nStep 1194, loss: 0.0070748161524534225\r\nStep 1195, loss: 0.006980064790695906\r\nStep 1196, loss: 0.006897574756294489\r\nStep 1197, loss: 0.006781957112252712\r\nStep 1198, loss: 0.0068474505096673965\r\nStep 1199, loss: 0.006798754911869764\r\nStep 1200, loss: 0.006692978087812662\r\nStep 1201, loss: 0.006644172593951225\r\nStep 1202, loss: 0.006435920484364033\r\nStep 1203, loss: 0.006491462700068951\r\nStep 1204, loss: 0.006312419660389423\r\nStep 1205, loss: 0.006274277810007334\r\nStep 1206, loss: 0.006178953684866428\r\nStep 1207, loss: 0.00611974997445941\r\nStep 1208, loss: 0.006083796266466379\r\nStep 1209, loss: 0.006046587135642767\r\nStep 1210, loss: 0.006050546187907457\r\nStep 1211, loss: 0.005936582572758198\r\nStep 1212, loss: 0.005856971722096205\r\nStep 1213, loss: 0.005798487924039364\r\nStep 1214, loss: 0.0057300059124827385\r\nStep 1215, loss: 0.005688611883670092\r\nStep 1216, loss: 0.005686311051249504\r\nStep 1217, loss: 0.00559938233345747\r\nStep 1218, loss: 0.005505791399627924\r\nStep 1219, loss: 0.005407734774053097\r\nStep 1220, loss: 0.005378386937081814\r\nStep 1221, loss: 0.005313735920935869\r\nStep 1222, loss: 0.005306249484419823\r\nStep 1223, loss: 0.005329348146915436\r\nStep 1224, loss: 0.005359758157283068\r\nStep 1225, loss: 0.00526319770142436\r\nStep 1226, loss: 0.005190741270780563\r\nStep 1227, loss: 0.005185501184314489\r\nStep 1228, loss: 0.0051390486769378185\r\nStep 1229, loss: 0.005055679008364677\r\nStep 1230, loss: 0.0050778016448020935\r\nStep 1231, loss: 0.0048799533396959305\r\nStep 1232, loss: 0.004941656719893217\r\nStep 1233, loss: 0.00486087566241622\r\nStep 1234, loss: 0.004725493490695953\r\nStep 1235, loss: 0.004799279384315014\r\nStep 1236, loss: 0.004584627691656351\r\nStep 1237, loss: 0.004664603155106306\r\nStep 1238, loss: 0.004532910417765379\r\nStep 1239, loss: 0.004556725732982159\r\nStep 1240, loss: 0.004560371395200491\r\nStep 1241, loss: 0.004467666614800692\r\nStep 1242, loss: 0.004417704418301582\r\nStep 1243, loss: 0.004459671210497618\r\nStep 1244, loss: 0.004677959717810154\r\nStep 1245, loss: 0.004486409481614828\r\nStep 1246, loss: 0.004584035370498896\r\nStep 1247, loss: 0.004312606994062662\r\nStep 1248, loss: 
0.004721683915704489\r\nStep 1249, loss: 0.004235804546624422\r\nStep 1250, loss: 0.004593838471919298\r\nStep 1251, loss: 0.004274077247828245\r\nStep 1252, loss: 0.004393757786601782\r\nStep 1253, loss: 0.004195457324385643\r\nStep 1254, loss: 0.004209129139780998\r\nStep 1255, loss: 0.0042000119574368\r\nStep 1256, loss: 0.004013199359178543\r\nStep 1257, loss: 0.004006926901638508\r\nStep 1258, loss: 0.004056074656546116\r\nStep 1259, loss: 0.003957782872021198\r\nStep 1260, loss: 0.0038708150386810303\r\nStep 1261, loss: 0.003886433085426688\r\nStep 1262, loss: 0.0038562219124287367\r\nStep 1263, loss: 0.0037888363003730774\r\nStep 1264, loss: 0.003724429290741682\r\nStep 1265, loss: 0.0037353932857513428\r\nStep 1266, loss: 0.0036576895508915186\r\nStep 1267, loss: 0.0036576949059963226\r\nStep 1268, loss: 0.0036363631952553988\r\nStep 1269, loss: 0.0035939214285463095\r\nStep 1270, loss: 0.0035424483940005302\r\nStep 1271, loss: 0.003509742673486471\r\nStep 1272, loss: 0.0035156304948031902\r\nStep 1273, loss: 0.003521776758134365\r\nStep 1274, loss: 0.0034145929384976625\r\nStep 1275, loss: 0.0034604044631123543\r\nStep 1276, loss: 0.003456242848187685\r\nStep 1277, loss: 0.0033756678458303213\r\nStep 1278, loss: 0.003428845899179578\r\nStep 1279, loss: 0.003467814764007926\r\nStep 1280, loss: 0.0033016144298017025\r\nStep 1281, loss: 0.0034215059131383896\r\nStep 1282, loss: 0.003289654850959778\r\nStep 1283, loss: 0.0032917680218815804\r\nStep 1284, loss: 0.0032589216716587543\r\nStep 1285, loss: 0.003193269018083811\r\nStep 1286, loss: 0.0031810635700821877\r\nStep 1287, loss: 0.00311397691257298\r\nStep 1288, loss: 0.0031468167435377836\r\nStep 1289, loss: 0.003088981145992875\r\nStep 1290, loss: 0.003027750412002206\r\nStep 1291, loss: 0.0030490639619529247\r\nStep 1292, loss: 0.0029547298327088356\r\nStep 1293, loss: 0.0030175093561410904\r\nStep 1294, loss: 0.0028865374624729156\r\nStep 1295, loss: 0.0029270192608237267\r\nStep 1296, loss: 0.0028551716823130846\r\nStep 1297, loss: 0.002821829868480563\r\nStep 1298, loss: 0.002804560586810112\r\nStep 1299, loss: 0.002792422426864505\r\nStep 1300, loss: 0.0028606983833014965\r\nStep 1301, loss: 0.0028138800989836454\r\nStep 1302, loss: 0.002740556374192238\r\nStep 1303, loss: 0.002680573845282197\r\nStep 1304, loss: 0.00271488050930202\r\nStep 1305, loss: 0.002625789726153016\r\nStep 1306, loss: 0.0026077586226165295\r\nStep 1307, loss: 0.0025873419363051653\r\nStep 1308, loss: 0.0025484771467745304\r\nStep 1309, loss: 0.0025513109285384417\r\nStep 1310, loss: 0.0025209509767591953\r\nStep 1311, loss: 0.002501119626685977\r\nStep 1312, loss: 0.002460850402712822\r\nStep 1313, loss: 0.002422658260911703\r\nStep 1314, loss: 0.0023941686376929283\r\nStep 1315, loss: 0.0023796684108674526\r\nStep 1316, loss: 0.0023657740093767643\r\nStep 1317, loss: 0.0023619181010872126\r\nStep 1318, loss: 0.0023159964475780725\r\nStep 1319, loss: 0.002314440906047821\r\nStep 1320, loss: 0.0022847349755465984\r\nStep 1321, loss: 0.0022957928013056517\r\nStep 1322, loss: 0.0022119656205177307\r\nStep 1323, loss: 0.002207001205533743\r\nStep 1324, loss: 0.0021593370474874973\r\nStep 1325, loss: 0.002172071486711502\r\nStep 1326, loss: 0.002226192969828844\r\nStep 1327, loss: 0.0023081721737980843\r\nStep 1328, loss: 0.002367952838540077\r\nStep 1329, loss: 0.0021991028916090727\r\nStep 1330, loss: 0.002307509770616889\r\nStep 1331, loss: 0.0023050387389957905\r\nStep 1332, loss: 0.0023447873536497355\r\nStep 1333, loss: 0.002155419671908021\r\nStep 
1334, loss: 0.00228729541413486\r\nStep 1335, loss: 0.002154828980565071\r\nStep 1336, loss: 0.0023087337613105774\r\nStep 1337, loss: 0.002094903727993369\r\nStep 1338, loss: 0.002350137336179614\r\nStep 1339, loss: 0.0022326575126498938\r\nStep 1340, loss: 0.0022563745733350515\r\nStep 1341, loss: 0.002074867719784379\r\nStep 1342, loss: 0.0023176518734544516\r\nStep 1343, loss: 0.002054609591141343\r\nStep 1344, loss: 0.002227594144642353\r\nStep 1345, loss: 0.0022113805171102285\r\nStep 1346, loss: 0.002024437300860882\r\nStep 1347, loss: 0.0020730358082801104\r\nStep 1348, loss: 0.0020603404846042395\r\nStep 1349, loss: 0.002064422471448779\r\nStep 1350, loss: 0.001985817914828658\r\nStep 1351, loss: 0.0019793242681771517\r\nStep 1352, loss: 0.001953692873939872\r\nStep 1353, loss: 0.0019315887475386262\r\nStep 1354, loss: 0.0019173184409737587\r\nStep 1355, loss: 0.001875755493529141\r\nStep 1356, loss: 0.0018829507753252983\r\nStep 1357, loss: 0.0018584701465442777\r\nStep 1358, loss: 0.0018110644305124879\r\nStep 1359, loss: 0.0018121650209650397\r\nStep 1360, loss: 0.0017944207647815347\r\nStep 1361, loss: 0.0017343678046017885\r\nStep 1362, loss: 0.0017704972997307777\r\nStep 1363, loss: 0.0017354977317154408\r\nStep 1364, loss: 0.0017086600419133902\r\nStep 1365, loss: 0.0017156113171949983\r\nStep 1366, loss: 0.001658273278735578\r\nStep 1367, loss: 0.001663189148530364\r\nStep 1368, loss: 0.0016886154189705849\r\nStep 1369, loss: 0.0016367059433832765\r\nStep 1370, loss: 0.001669299672357738\r\nStep 1371, loss: 0.001620073919184506\r\nStep 1372, loss: 0.0015865088207647204\r\nStep 1373, loss: 0.0015859894920140505\r\nStep 1374, loss: 0.0015742001123726368\r\n
null
terminal_output
666
2,170,752
TERMINAL
0
0
Step 1375, loss: 0.0015620454214513302\r\nStep 1376, loss: 0.0015454271342605352\r\nStep 1377, loss: 0.0015678780619055033\r\nStep 1378, loss: 0.0015310747548937798\r\nStep 1379, loss: 0.0015133678680285811\r\nStep 1380, loss: 0.0015219355700537562\r\nStep 1381, loss: 0.0015017141122370958\r\nStep 1382, loss: 0.0014798096381127834\r\nStep 1383, loss: 0.0014288945822045207\r\nStep 1384, loss: 0.0014804323436692357\r\nStep 1385, loss: 0.0014169778442010283\r\nStep 1386, loss: 0.0014188949717208743\r\nStep 1387, loss: 0.001399545231834054\r\nStep 1388, loss: 0.001384945586323738\r\nStep 1389, loss: 0.0013639283133670688\r\nStep 1390, loss: 0.0013528710696846247\r\nStep 1391, loss: 0.0013322271406650543\r\nStep 1392, loss: 0.0013593877665698528\r\nStep 1393, loss: 0.0013607240980491042\r\nStep 1394, loss: 0.0014022710965946317\r\nStep 1395, loss: 0.0013529560528695583\r\nStep 1396, loss: 0.0013038046890869737\r\nStep 1397, loss: 0.0013925175881013274\r\nStep 1398, loss: 0.0014506709994748235\r\nStep 1399, loss: 0.0013252076460048556\r\nStep 1400, loss: 0.0015645093517377973\r\nStep 1401, loss: 0.0012962156906723976\r\nStep 1402, loss: 0.002066560322418809\r\nStep 1403, loss: 0.005263920407742262\r\nStep 1404, loss: 0.0018966387724503875\r\nStep 1405, loss: 0.004433366935700178\r\nStep 1406, loss: 0.002806056523695588\r\nStep 1407, loss: 0.014946104027330875\r\nStep 1408, loss: 0.0025691883638501167\r\nStep 1409, loss: 0.035218916833400726\r\nStep 1410, loss: 0.0027447494212538004\r\nStep 1411, loss: 0.0023895471822470427\r\nStep 1412, loss: 0.027200855314731598\r\nStep 1413, loss: 0.004816902801394463\r\nStep 1414, loss: 0.0034877904690802097\r\nStep 1415, loss: 0.00559215946123004\r\nStep 1416, loss: 0.008142463862895966\r\nStep 1417, loss: 0.005207132548093796\r\nStep 1418, loss: 0.004050171934068203\r\nStep 1419, loss: 0.004853872582316399\r\nStep 1420, loss: 0.003966701216995716\r\nStep 1421, loss: 0.003037256421521306\r\nStep 1422, loss: 0.0030154718551784754\r\nStep 1423, loss: 0.0030595003627240658\r\nStep 1424, loss: 0.002653771312907338\r\nStep 1425, loss: 0.002712199930101633\r\nStep 1426, loss: 0.002773202955722809\r\nStep 1427, loss: 0.0023036233615130186\r\nStep 1428, loss: 0.0022119462955743074\r\nStep 1429, loss: 0.0023911434691399336\r\nStep 1430, loss: 0.002337019657716155\r\nStep 1431, loss: 0.0021934809628874063\r\nStep 1432, loss: 0.0019765114411711693\r\nStep 1433, loss: 0.001976784085854888\r\nStep 1434, loss: 0.0019540167413651943\r\nStep 1435, loss: 0.002004594076424837\r\nStep 1436, loss: 0.0019670058973133564\r\nStep 1437, loss: 0.0018799530807882547\r\nStep 1438, loss: 0.0017907493747770786\r\nStep 1439, loss: 0.0017487325239926577\r\nStep 1440, loss: 0.001703563379123807\r\nStep 1441, loss: 0.001724748988635838\r\nStep 1442, loss: 0.0017006030539050698\r\nStep 1443, loss: 0.0016942414222285151\r\nStep 1444, loss: 0.0016689366893842816\r\nStep 1445, loss: 0.0016442383639514446\r\nStep 1446, loss: 0.0016209642635658383\r\nStep 1447, loss: 0.0015533604891970754\r\nStep 1448, loss: 0.0014944858849048615\r\nStep 1449, loss: 0.001498078228905797\r\nStep 1450, loss: 0.0014994342345744371\r\nStep 1451, loss: 0.0014891045866534114\r\nStep 1452, loss: 0.0014391926815733314\r\nStep 1453, loss: 0.0014249965315684676\r\nStep 1454, loss: 0.0014212274691089988\r\nStep 1455, loss: 0.0013855865690857172\r\nStep 1456, loss: 0.001376844709739089\r\nStep 1457, loss: 0.0013847248628735542\r\nStep 1458, loss: 0.001368040801025927\r\nStep 1459, loss: 0.0013278075493872166\r\nStep 1460, 
loss: 0.001317258458584547\r\nStep 1461, loss: 0.0013125936966389418\r\nStep 1462, loss: 0.0013005759101361036\r\nStep 1463, loss: 0.0012858690461143851\r\nStep 1464, loss: 0.001257104566320777\r\nStep 1465, loss: 0.0012518266448751092\r\nStep 1466, loss: 0.0012425333261489868\r\nStep 1467, loss: 0.0012310351012274623\r\nStep 1468, loss: 0.0012113444972783327\r\nStep 1469, loss: 0.00120545772369951\r\nStep 1470, loss: 0.00118406699039042\r\nStep 1471, loss: 0.0011921789264306426\r\nStep 1472, loss: 0.0011764526134356856\r\nStep 1473, loss: 0.0011597980046644807\r\nStep 1474, loss: 0.001148686627857387\r\nStep 1475, loss: 0.0011501619592308998\r\nStep 1476, loss: 0.0011365468380972743\r\nStep 1477, loss: 0.0011185038601979613\r\nStep 1478, loss: 0.0011171265505254269\r\nStep 1479, loss: 0.0011024788254871964\r\nStep 1480, loss: 0.001097066793590784\r\nStep 1481, loss: 0.0010792962275445461\r\nStep 1482, loss: 0.0010645658476278186\r\nStep 1483, loss: 0.0010749787325039506\r\nStep 1484, loss: 0.0010525848483666778\r\nStep 1485, loss: 0.0010495668975636363\r\nStep 1486, loss: 0.0010369587689638138\r\nStep 1487, loss: 0.0010235076770186424\r\nStep 1488, loss: 0.0010135771008208394\r\nStep 1489, loss: 0.0010062410729005933\r\nStep 1490, loss: 0.001000197953544557\r\nStep 1491, loss: 0.000992900226265192\r\nStep 1492, loss: 0.000979712000116706\r\nStep 1493, loss: 0.0009737501968629658\r\nStep 1494, loss: 0.0009707424906082451\r\nStep 1495, loss: 0.0009808015311136842\r\nStep 1496, loss: 0.0009661840158514678\r\nStep 1497, loss: 0.0009407761390320957\r\nStep 1498, loss: 0.0009363529970869422\r\nStep 1499, loss: 0.0009924407349899411\r\nStep 1500, loss: 0.0009174228180199862\r\nStep 1501, loss: 0.0009144610376097262\r\nStep 1502, loss: 0.0009224752429872751\r\nStep 1503, loss: 0.0008884809212759137\r\nStep 1504, loss: 0.0008940824773162603\r\nStep 1505, loss: 0.000884276581928134\r\nStep 1506, loss: 0.0008724505314603448\r\nStep 1507, loss: 0.0008729330729693174\r\nStep 1508, loss: 0.0008427618886344135\r\nStep 1509, loss: 0.0008669798262417316\r\nStep 1510, loss: 0.0008407928398810327\r\nStep 1511, loss: 0.0008532613283023238\r\nStep 1512, loss: 0.0008410926093347371\r\nStep 1513, loss: 0.0007928110426291823\r\nStep 1514, loss: 0.0008409158326685429\r\nStep 1515, loss: 0.0008328540716320276\r\nStep 1516, loss: 0.0007803189218975604\r\nStep 1517, loss: 0.0008031541365198791\r\nStep 1518, loss: 0.0007726009353064001\r\nStep 1519, loss: 0.0007722788141109049\r\nStep 1520, loss: 0.0007798648439347744\r\nStep 1521, loss: 0.0007896889583207667\r\nStep 1522, loss: 0.0007470501586794853\r\nStep 1523, loss: 0.0007366976933553815\r\nStep 1524, loss: 0.000757212343160063\r\nStep 1525, loss: 0.0007187482551671565\r\nStep 1526, loss: 0.0007681389106437564\r\nStep 1527, loss: 0.0008039849926717579\r\nStep 1528, loss: 0.0007525162072852254\r\nStep 1529, loss: 0.0007593120681121945\r\nStep 1530, loss: 0.000714841007720679\r\nStep 1531, loss: 0.0007414017454721034\r\nStep 1532, loss: 0.000692514528054744\r\nStep 1533, loss: 0.0007394911954179406\r\nStep 1534, loss: 0.0006694064359180629\r\nStep 1535, loss: 0.000747747253626585\r\nStep 1536, loss: 0.0006897809216752648\r\nStep 1537, loss: 0.0006675801705569029\r\nStep 1538, loss: 0.000671535381115973\r\nStep 1539, loss: 0.000668856780976057\r\nStep 1540, loss: 0.0006335677462629974\r\nStep 1541, loss: 0.0006540918839164078\r\nStep 1542, loss: 0.000610284274443984\r\nStep 1543, loss: 0.00061310816090554\r\nStep 1544, loss: 0.0006291834870353341\r\nStep 1545, loss: 
0.0006233148160390556\r\nStep 1546, loss: 0.000600804400164634\r\nStep 1547, loss: 0.0005970756756141782\r\nStep 1548, loss: 0.0005762177752330899\r\nStep 1549, loss: 0.0006170191336423159\r\nStep 1550, loss: 0.0005581666482612491\r\nStep 1551, loss: 0.00061130675021559\r\nStep 1552, loss: 0.0006002048030495644\r\nStep 1553, loss: 0.0005743888323195279\r\nStep 1554, loss: 0.0005691359401680529\r\nStep 1555, loss: 0.0005670820828527212\r\nStep 1556, loss: 0.0005465499707497656\r\nStep 1557, loss: 0.0005500828265212476\r\nStep 1558, loss: 0.0005485228612087667\r\nStep 1559, loss: 0.0005478914245031774\r\nStep 1560, loss: 0.00056098448112607\r\nStep 1561, loss: 0.0005225483328104019\r\nStep 1562, loss: 0.0005367467529140413\r\nStep 1563, loss: 0.0005229031667113304\r\nStep 1564, loss: 0.0005164807080291212\r\nStep 1565, loss: 0.0005105609307065606\r\nStep 1566, loss: 0.0005616155685856938\r\nStep 1567, loss: 0.000499479821883142\r\nStep 1568, loss: 0.000504105060826987\r\nStep 1569, loss: 0.0004960879450663924\r\nStep 1570, loss: 0.00047881051432341337\r\nStep 1571, loss: 0.00048428928130306304\r\nStep 1572, loss: 0.0004729954816866666\r\nStep 1573, loss: 0.0004597108345478773\r\nStep 1574, loss: 0.000453652668511495\r\nStep 1575, loss: 0.00044175234506838024\r\nStep 1576, loss: 0.0004465231322683394\r\nStep 1577, loss: 0.00044713763054460287\r\nStep 1578, loss: 0.0004524269315879792\r\nStep 1579, loss: 0.0004665072774514556\r\nStep 1580, loss: 0.00044660508865490556\r\nStep 1581, loss: 0.00044424959924072027\r\nStep 1582, loss: 0.0004368418303783983\r\nStep 1583, loss: 0.00046102641499601305\r\nStep 1584, loss: 0.00043687739525921643\r\nStep 1585, loss: 0.00042125541949644685\r\n
null
terminal_output
667
2,189,925
TERMINAL
0
0
Step 1586, loss: 0.00045317475451156497\r\nStep 1587, loss: 0.00042363148531876504\r\nStep 1588, loss: 0.0004080022335983813\r\nStep 1589, loss: 0.0004214071377646178\r\nStep 1590, loss: 0.00036888624890707433\r\nStep 1591, loss: 0.00042945475433953106\r\nStep 1592, loss: 0.0003904103650711477\r\nStep 1593, loss: 0.00039629367529414594\r\nStep 1594, loss: 0.00042725479579530656\r\nStep 1595, loss: 0.00040545326191931963\r\nStep 1596, loss: 0.0006596833118237555\r\nStep 1597, loss: 0.0010559798683971167\r\nStep 1598, loss: 0.0005999964778311551\r\nStep 1599, loss: 0.0036543398164212704\r\nStep 1600, loss: 0.0009570637485012412\r\nStep 1601, loss: 0.047660477459430695\r\nStep 1602, loss: 0.0004840762703679502\r\nStep 1603, loss: 0.012310950085520744\r\nStep 1604, loss: 0.04434973746538162\r\nStep 1605, loss: 0.002001166110858321\r\nStep 1606, loss: 0.0014710257528349757\r\nStep 1607, loss: 0.01544937677681446\r\nStep 1608, loss: 0.015939615666866302\r\nStep 1609, loss: 0.005253448151051998\r\nStep 1610, loss: 0.004263800103217363\r\nStep 1611, loss: 0.005446156952530146\r\nStep 1612, loss: 0.0049955095164477825\r\nStep 1613, loss: 0.0037756923120468855\r\nStep 1614, loss: 0.0026393402367830276\r\nStep 1615, loss: 0.003056403947994113\r\nStep 1616, loss: 0.003076181747019291\r\nStep 1617, loss: 0.0031689435709267855\r\nStep 1618, loss: 0.0029601382557302713\r\nStep 1619, loss: 0.0025978346820920706\r\nStep 1620, loss: 0.0014641323359683156\r\nStep 1621, loss: 0.0013065101811662316\r\nStep 1622, loss: 0.0012769891181960702\r\nStep 1623, loss: 0.0012362179113551974\r\nStep 1624, loss: 0.001325769815593958\r\nStep 1625, loss: 0.0014564625453203917\r\nStep 1626, loss: 0.0014383230591192842\r\nStep 1627, loss: 0.0013870054390281439\r\nStep 1628, loss: 0.001190141774713993\r\nStep 1629, loss: 0.0010750088840723038\r\nStep 1630, loss: 0.0010259905830025673\r\nStep 1631, loss: 0.0009272204479202628\r\nStep 1632, loss: 0.0008711183327250183\r\nStep 1633, loss: 0.0008963988511823118\r\nStep 1634, loss: 0.0008618553401902318\r\nStep 1635, loss: 0.0008171169902198017\r\nStep 1636, loss: 0.0008131208014674485\r\nStep 1637, loss: 0.0007722990121692419\r\nStep 1638, loss: 0.000747562968172133\r\nStep 1639, loss: 0.0007458509062416852\r\nStep 1640, loss: 0.0007163126720115542\r\nStep 1641, loss: 0.0007040303898975253\r\nStep 1642, loss: 0.0006822288851253688\r\nStep 1643, loss: 0.000673751172143966\r\nStep 1644, loss: 0.000643836276140064\r\nStep 1645, loss: 0.0006033850950188935\r\nStep 1646, loss: 0.0005893292254768312\r\nStep 1647, loss: 0.0005835664924234152\r\nStep 1648, loss: 0.0005775215686298907\r\nStep 1649, loss: 0.0005790777504444122\r\nStep 1650, loss: 0.0005642733885906637\r\nStep 1651, loss: 0.0005519168917089701\r\nStep 1652, loss: 0.0005309173720888793\r\nStep 1653, loss: 0.0005234880954958498\r\nStep 1654, loss: 0.000520469038747251\r\nStep 1655, loss: 0.0005075688241049647\r\nStep 1656, loss: 0.0004991911118850112\r\nStep 1657, loss: 0.0004885246162302792\r\nStep 1658, loss: 0.0004751462838612497\r\nStep 1659, loss: 0.0004718182608485222\r\nStep 1660, loss: 0.0004739886790048331\r\nStep 1661, loss: 0.0004644420405384153\r\nStep 1662, loss: 0.0004583653644658625\r\nStep 1663, loss: 0.0004480787320062518\r\nStep 1664, loss: 0.0004416340379975736\r\nStep 1665, loss: 0.00043499961611814797\r\nStep 1666, loss: 0.00042755043250508606\r\nStep 1667, loss: 0.0004230496706441045\r\nStep 1668, loss: 0.0004139979719184339\r\nStep 1669, loss: 0.0004127553547732532\r\nStep 1670, loss: 
0.000410620792536065\r\nStep 1671, loss: 0.0004019704647362232\r\nStep 1672, loss: 0.0003981523623224348\r\nStep 1673, loss: 0.00039243005448952317\r\nStep 1674, loss: 0.000385857536457479\r\nStep 1675, loss: 0.0003800912236329168\r\nStep 1676, loss: 0.00037934500142000616\r\nStep 1677, loss: 0.0003775427467189729\r\nStep 1678, loss: 0.0003735966747626662\r\nStep 1679, loss: 0.00036580004962161183\r\nStep 1680, loss: 0.00036139669828116894\r\nStep 1681, loss: 0.0003582891367841512\r\nStep 1682, loss: 0.00034914666321128607\r\nStep 1683, loss: 0.0003448462230153382\r\nStep 1684, loss: 0.00034776004031300545\r\nStep 1685, loss: 0.00034169797436334193\r\nStep 1686, loss: 0.0003350850020069629\r\nStep 1687, loss: 0.0003412895603105426\r\nStep 1688, loss: 0.00033290410647168756\r\nStep 1689, loss: 0.000329718051943928\r\nStep 1690, loss: 0.0003242602397222072\r\nStep 1691, loss: 0.0003204644308425486\r\nStep 1692, loss: 0.0003141354536637664\r\nStep 1693, loss: 0.00031306923483498394\r\nStep 1694, loss: 0.0003095706051681191\r\nStep 1695, loss: 0.00030938078998588026\r\nStep 1696, loss: 0.0002967601176351309\r\nStep 1697, loss: 0.00030251420685090125\r\nStep 1698, loss: 0.0002983211597893387\r\nStep 1699, loss: 0.0002940730773843825\r\nStep 1700, loss: 0.0002920151164289564\r\nStep 1701, loss: 0.0002911665360443294\r\nStep 1702, loss: 0.00028963646036572754\r\nStep 1703, loss: 0.00028975779423490167\r\nStep 1704, loss: 0.0002913105709012598\r\nStep 1705, loss: 0.0002789819845929742\r\nStep 1706, loss: 0.0002710912667680532\r\nStep 1707, loss: 0.0002810036239679903\r\nStep 1708, loss: 0.00026452314341440797\r\nStep 1709, loss: 0.00026637481641955674\r\nStep 1710, loss: 0.00026021699886769056\r\nStep 1711, loss: 0.0002571173245087266\r\nStep 1712, loss: 0.0002596699632704258\r\nStep 1713, loss: 0.0002550316567067057\r\nStep 1714, loss: 0.0002631903626024723\r\nStep 1715, loss: 0.0002562365843914449\r\nStep 1716, loss: 0.0002550399804022163\r\nStep 1717, loss: 0.0002642278268467635\r\nStep 1718, loss: 0.0002524023875594139\r\nStep 1719, loss: 0.00029367252136580646\r\nStep 1720, loss: 0.0003688838623929769\r\nStep 1721, loss: 0.000266178569290787\r\nStep 1722, loss: 0.0003470243245828897\r\nStep 1723, loss: 0.00024320284137502313\r\nStep 1724, loss: 0.0004230790364090353\r\nStep 1725, loss: 0.00025138549972325563\r\nStep 1726, loss: 0.00039762057713232934\r\nStep 1727, loss: 0.0002494319633115083\r\nStep 1728, loss: 0.0002768921258393675\r\nStep 1729, loss: 0.00030888704350218177\r\nStep 1730, loss: 0.0002616044075693935\r\nStep 1731, loss: 0.0002759354538284242\r\nStep 1732, loss: 0.00029224707395769656\r\nStep 1733, loss: 0.00024305793340317905\r\nStep 1734, loss: 0.00027603571652434766\r\nStep 1735, loss: 0.0002409023290965706\r\nStep 1736, loss: 0.00024279399076476693\r\nStep 1737, loss: 0.00023755084839649498\r\nStep 1738, loss: 0.00024286685220431536\r\nStep 1739, loss: 0.00022689788602292538\r\nStep 1740, loss: 0.000235857063671574\r\nStep 1741, loss: 0.00022767767950426787\r\nStep 1742, loss: 0.0002194784174207598\r\nStep 1743, loss: 0.00022041106421966106\r\nStep 1744, loss: 0.0002305201778654009\r\nStep 1745, loss: 0.00021415369701571763\r\nStep 1746, loss: 0.00021904440654907376\r\nStep 1747, loss: 0.00026960030663758516\r\nStep 1748, loss: 0.00021438277326524258\r\nStep 1749, loss: 0.0002987166226375848\r\nStep 1750, loss: 0.00022083413205109537\r\nStep 1751, loss: 0.00021557985746767372\r\nStep 1752, loss: 0.00024868195760063827\r\nStep 1753, loss: 0.000222534712520428\r\nStep 1754, 
loss: 0.000220973786781542\r\nStep 1755, loss: 0.00021749676670879126\r\nStep 1756, loss: 0.00020777825557161123\r\nStep 1757, loss: 0.0002050633920589462\r\nStep 1758, loss: 0.00021814368665218353\r\nStep 1759, loss: 0.00020399292407091707\r\nStep 1760, loss: 0.00020514187053777277\r\nStep 1761, loss: 0.00019758165581151843\r\nStep 1762, loss: 0.00020988643518649042\r\nStep 1763, loss: 0.00017816528270486742\r\nStep 1764, loss: 0.00019855302525684237\r\nStep 1765, loss: 0.00018718773208092898\r\nStep 1766, loss: 0.0001844959770096466\r\nStep 1767, loss: 0.0001856030576163903\r\nStep 1768, loss: 0.00019073668227065355\r\nStep 1769, loss: 0.0001991406170418486\r\nStep 1770, loss: 0.0001753499236656353\r\nStep 1771, loss: 0.00022096795146353543\r\nStep 1772, loss: 0.00038630698691122234\r\nStep 1773, loss: 0.0006003652815707028\r\nStep 1774, loss: 0.00019334194075781852\r\nStep 1775, loss: 0.0008619521977379918\r\nStep 1776, loss: 0.0004100275691598654\r\nStep 1777, loss: 0.00037802147562615573\r\nStep 1778, loss: 0.0009522411855868995\r\nStep 1779, loss: 0.0003113946586381644\r\nStep 1780, loss: 0.0005046575097367167\r\nStep 1781, loss: 0.0004856247396674007\r\nStep 1782, loss: 0.0006465542246587574\r\nStep 1783, loss: 0.00043768403702415526\r\nStep 1784, loss: 0.0003644167154561728\r\nStep 1785, loss: 0.0003490441886242479\r\nStep 1786, loss: 0.0004665420565288514\r\nStep 1787, loss: 0.000535240804310888\r\nStep 1788, loss: 0.00045096903340891004\r\nStep 1789, loss: 0.0003860594879370183\r\nStep 1790, loss: 0.0007056056056171656\r\nStep 1791, loss: 0.0003441586741246283\r\nStep 1792, loss: 0.0006393958465196192\r\nStep 1793, loss: 0.00033743304084055126\r\nStep 1794, loss: 0.00032917934004217386\r\n
null
terminal_output
668
2,211,784
TERMINAL
0
0
Step 1795, loss: 0.00041309380321763456\r\nStep 1796, loss: 0.0003256653144489974\r\nStep 1797, loss: 0.00033648250973783433\r\nStep 1798, loss: 0.00046597333857789636\r\nStep 1799, loss: 0.0002587591588962823\r\nStep 1800, loss: 0.00038320053135976195\r\nStep 1801, loss: 0.0003097375447396189\r\nStep 1802, loss: 0.0003347669553477317\r\nStep 1803, loss: 0.0002627239446155727\r\nStep 1804, loss: 0.00026980575057677925\r\nStep 1805, loss: 0.0002792503801174462\r\nStep 1806, loss: 0.00030491259531117976\r\nStep 1807, loss: 0.0002656216383911669\r\nStep 1808, loss: 0.0002620711165945977\r\nStep 1809, loss: 0.0002485739241819829\r\nStep 1810, loss: 0.00022482662461698055\r\nStep 1811, loss: 0.00022801845625508577\r\nStep 1812, loss: 0.00022989511489868164\r\nStep 1813, loss: 0.00022498024918604642\r\nStep 1814, loss: 0.00021358573576435447\r\nStep 1815, loss: 0.0002108215558109805\r\nStep 1816, loss: 0.00021712115267291665\r\nStep 1817, loss: 0.0002046241716016084\r\nStep 1818, loss: 0.0002024870627792552\r\nStep 1819, loss: 0.0001958047942025587\r\nStep 1820, loss: 0.00018349842866882682\r\nStep 1821, loss: 0.00017827693955041468\r\nStep 1822, loss: 0.000180309361894615\r\nStep 1823, loss: 0.00017557530372869223\r\nStep 1824, loss: 0.00016965085524134338\r\nStep 1825, loss: 0.00017119017138611525\r\nStep 1826, loss: 0.00017109223699662834\r\nStep 1827, loss: 0.0001651919010328129\r\nStep 1828, loss: 0.00016223240527324378\r\nStep 1829, loss: 0.00015924063336569816\r\nStep 1830, loss: 0.0001560840173624456\r\nStep 1831, loss: 0.00015665525279473513\r\nStep 1832, loss: 0.0001562782417749986\r\nStep 1833, loss: 0.00015227396215777844\r\nStep 1834, loss: 0.0001494779426138848\r\nStep 1835, loss: 0.00014491958427242935\r\nStep 1836, loss: 0.00014285891666077077\r\nStep 1837, loss: 0.00014315453881863505\r\nStep 1838, loss: 0.0001419628388248384\r\nStep 1839, loss: 0.0001404053473379463\r\nStep 1840, loss: 0.00013499656051862985\r\nStep 1841, loss: 0.00013917645264882594\r\nStep 1842, loss: 0.0001320109877269715\r\nStep 1843, loss: 0.00013303336163517088\r\nStep 1844, loss: 0.00013310542271938175\r\nStep 1845, loss: 0.00012857014371547848\r\nStep 1846, loss: 0.0001279918651562184\r\nStep 1847, loss: 0.00012768548913300037\r\nStep 1848, loss: 0.00012460537254810333\r\nStep 1849, loss: 0.00012362413690425456\r\nStep 1850, loss: 0.00012119283928768709\r\nStep 1851, loss: 0.00012258865172043443\r\nStep 1852, loss: 0.00011725588410627097\r\nStep 1853, loss: 0.00011653714318526909\r\nStep 1854, loss: 0.00011662511678878218\r\nStep 1855, loss: 0.00011928041203645989\r\nStep 1856, loss: 0.00011446599819464609\r\nStep 1857, loss: 0.00011242073378525674\r\nStep 1858, loss: 0.00011042784171877429\r\nStep 1859, loss: 0.00010934314195765182\r\nStep 1860, loss: 0.00010881640628213063\r\nStep 1861, loss: 0.00010958352504530922\r\nStep 1862, loss: 0.00010535640467423946\r\nStep 1863, loss: 0.00010438833123771474\r\nStep 1864, loss: 0.00010444900544825941\r\nStep 1865, loss: 0.00010431574628455564\r\nStep 1866, loss: 0.00010469590779393911\r\nStep 1867, loss: 0.00010304307943442836\r\nStep 1868, loss: 0.00010240152187179774\r\nStep 1869, loss: 0.00010106965055456385\r\nStep 1870, loss: 0.00010062602086691186\r\nStep 1871, loss: 0.00010384004417574033\r\nStep 1872, loss: 0.00011299338075332344\r\nStep 1873, loss: 0.00020425445109140128\r\nStep 1874, loss: 0.0009703970281407237\r\nStep 1875, loss: 0.00020501350809354335\r\nStep 1876, loss: 0.0008871708996593952\r\nStep 1877, loss: 0.000220347530557774\r\nStep 1878, 
loss: 0.0001514552568551153\r\nStep 1879, loss: 0.0005311493878252804\r\nStep 1880, loss: 0.00018254545284435153\r\nStep 1881, loss: 0.0002063766005448997\r\nStep 1882, loss: 0.00020580872660502791\r\nStep 1883, loss: 0.0002641376049723476\r\nStep 1884, loss: 0.00018483499297872186\r\nStep 1885, loss: 0.0002066858869511634\r\nStep 1886, loss: 0.00015859243285376579\r\nStep 1887, loss: 0.00017486722208559513\r\nStep 1888, loss: 0.00020768502145074308\r\nStep 1889, loss: 0.00016463041538372636\r\nStep 1890, loss: 0.000159415794769302\r\nStep 1891, loss: 0.0001671619975240901\r\nStep 1892, loss: 0.00016361541929654777\r\nStep 1893, loss: 0.00014684550114907324\r\nStep 1894, loss: 0.00019125209655612707\r\nStep 1895, loss: 0.0002535067906137556\r\nStep 1896, loss: 0.0006337713566608727\r\nStep 1897, loss: 0.0004025777743663639\r\nStep 1898, loss: 0.00037639483343809843\r\nStep 1899, loss: 0.0017769994447007775\r\nStep 1900, loss: 0.0003596358874347061\r\nStep 1901, loss: 0.0020131354685872793\r\nStep 1902, loss: 0.0005444188136607409\r\nStep 1903, loss: 0.010928656905889511\r\nStep 1904, loss: 0.00035728790680877864\r\nStep 1905, loss: 0.03524984046816826\r\nStep 1906, loss: 0.0004836552543565631\r\nStep 1907, loss: 0.01202648039907217\r\nStep 1908, loss: 0.030103053897619247\r\nStep 1909, loss: 0.0010342251043766737\r\nStep 1910, loss: 0.0012939833104610443\r\nStep 1911, loss: 0.019721366465091705\r\nStep 1912, loss: 0.004992340691387653\r\nStep 1913, loss: 0.001975332386791706\r\nStep 1914, loss: 0.004554453305900097\r\nStep 1915, loss: 0.003421274945139885\r\nStep 1916, loss: 0.0018403311260044575\r\nStep 1917, loss: 0.002232007682323456\r\nStep 1918, loss: 0.0029476878698915243\r\nStep 1919, loss: 0.002355358563363552\r\nStep 1920, loss: 0.001502528553828597\r\nStep 1921, loss: 0.0017092121997848153\r\nStep 1922, loss: 0.0011354658054187894\r\nStep 1923, loss: 0.0008625361369922757\r\nStep 1924, loss: 0.0008076729718595743\r\nStep 1925, loss: 0.0008162890444509685\r\nStep 1926, loss: 0.0008073421777226031\r\nStep 1927, loss: 0.0009811737108975649\r\nStep 1928, loss: 0.0008674270939081907\r\nStep 1929, loss: 0.0008214344270527363\r\nStep 1930, loss: 0.0007099769427441061\r\nStep 1931, loss: 0.0006613670848309994\r\nStep 1932, loss: 0.000559050589799881\r\nStep 1933, loss: 0.0005033796187490225\r\nStep 1934, loss: 0.0004292028024792671\r\nStep 1935, loss: 0.0003899504663422704\r\nStep 1936, loss: 0.0003590602718759328\r\nStep 1937, loss: 0.00035199528792873025\r\nStep 1938, loss: 0.000337013480020687\r\nStep 1939, loss: 0.0003411616198718548\r\nStep 1940, loss: 0.00033287241240032017\r\nStep 1941, loss: 0.0003296398790553212\r\nStep 1942, loss: 0.00031880062306299806\r\nStep 1943, loss: 0.000314241653541103\r\nStep 1944, loss: 0.0003026192425750196\r\nStep 1945, loss: 0.00028903738711960614\r\nStep 1946, loss: 0.0002780277281999588\r\nStep 1947, loss: 0.00027378747472539544\r\nStep 1948, loss: 0.00026008320855908096\r\nStep 1949, loss: 0.0002544401795603335\r\nStep 1950, loss: 0.00024393049534410238\r\nStep 1951, loss: 0.00023999650147743523\r\nStep 1952, loss: 0.0002270268596475944\r\nStep 1953, loss: 0.00021829789329785854\r\nStep 1954, loss: 0.00021055855904705822\r\nStep 1955, loss: 0.00020172944641672075\r\nStep 1956, loss: 0.00019767304183915257\r\nStep 1957, loss: 0.0001947249984368682\r\nStep 1958, loss: 0.00019109001732431352\r\nStep 1959, loss: 0.00018978673324454576\r\nStep 1960, loss: 0.00018274948524776846\r\nStep 1961, loss: 0.00017691674293018878\r\nStep 1962, loss: 
0.00017216120613738894\r\nStep 1963, loss: 0.0001675546809565276\r\nStep 1964, loss: 0.0001640628615859896\r\nStep 1965, loss: 0.0001599054376129061\r\nStep 1966, loss: 0.00015876503312028944\r\nStep 1967, loss: 0.00015555390564259142\r\nStep 1968, loss: 0.00015443956363014877\r\nStep 1969, loss: 0.00015177871682681143\r\nStep 1970, loss: 0.00014794684830121696\r\nStep 1971, loss: 0.00014473118062596768\r\nStep 1972, loss: 0.00014217180432751775\r\nStep 1973, loss: 0.00014011959137860686\r\nStep 1974, loss: 0.00013753520033787936\r\nStep 1975, loss: 0.0001347391662420705\r\nStep 1976, loss: 0.00013395400310400873\r\nStep 1977, loss: 0.00013048588880337775\r\nStep 1978, loss: 0.00012868283374700695\r\nStep 1979, loss: 0.00012608520046342164\r\nStep 1980, loss: 0.00012402510037645698\r\nStep 1981, loss: 0.00012261736264918\r\nStep 1982, loss: 0.00012066977797076106\r\nStep 1983, loss: 0.00011886046559084207\r\nStep 1984, loss: 0.00011681547766784206\r\nStep 1985, loss: 0.00011567370529519394\r\nStep 1986, loss: 0.0001136993378167972\r\nStep 1987, loss: 0.00011291183182038367\r\nStep 1988, loss: 0.00011067662853747606\r\nStep 1989, loss: 0.00010986061533913016\r\nStep 1990, loss: 0.00010795108391903341\r\nStep 1991, loss: 0.00010656158701749519\r\nStep 1992, loss: 0.00010494189336895943\r\nStep 1993, loss: 0.00010378381557529792\r\nStep 1994, loss: 0.00010287944314768538\r\nStep 1995, loss: 0.00010195723007200286\r\nStep 1996, loss: 9.999631583923474e-05\r\nStep 1997, loss: 9.880777361104265e-05\r\nStep 1998, loss: 9.73841524682939e-05\r\nStep 1999, loss: 9.63257989496924e-05\r\nSaved checkpoint at step 2000\r\n
null
terminal_output
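The two "Saved checkpoint at step ..." lines in the training log above come from the periodic save driven by --log_checkpoint_interval=1000 over --num_steps=2000. A minimal sketch of that cadence, assuming hypothetical train_step/save_checkpoint stand-ins rather than the actual train_dynamics.py internals:

# Cadence sketch only; train_step and save_checkpoint are hypothetical
# stand-ins for the real training update and orbax-style saving code.
num_steps = 2000
log_checkpoint_interval = 1000

def train_step(step: int) -> float:
    return 1.0 / (step + 1)  # placeholder loss so the sketch is runnable

def save_checkpoint(step: int) -> None:
    print(f"Saved checkpoint at step {step}")

for step in range(num_steps):
    if step > 0 and step % log_checkpoint_interval == 0:
        save_checkpoint(step)          # fires after step 999, before step 1000
    loss = train_step(step)
    print(f"Step {step}, loss: {loss}")

save_checkpoint(num_steps)             # final save: "Saved checkpoint at step 2000"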
669
2,213,792
TERMINAL
0
0
wandb: \r\nwandb: 🚀 View run dynamics-causal-overfit-actionspace-1-1393544 at: https://wandb.ai/instant-uv/jafar/runs/8r0fks74\r\nwandb: Find logs at: wandb/run-20250722_181944-8r0fks74/logs\r\n
null
terminal_output
670
2,216,183
TERMINAL
0
0
\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_ckpt_dir=/storage/user/mahajanm/Projects/world-modeling/checkpoints/tokenizer_ckpt\r\ndynamics_ckpt_dir=$1\r\necho $dynamics_ckpt_dir\r\n\r\nenv | grep SLURM\r\n\r\nsrun python sample.py \\r\n --checkpoint $dynamics_ckpt_dir \\r\n --dyna_dim=128 \\r\n --dyna_num_blocks=2 \\r\n --dyna_num_heads=4 \\r\n --seq_len=2 \\r\n --num_latent_actions=1 \\r\n --start_frame=0 \\r\n --data_dir $array_records_dir\r\n\r\n# srun python sample.py \\r\n # --checkpoint $dynamics_ckpt_dir \\r\n # --start_frame=0 \\r\n # --batch_size=12 \\r\n # --seq_len=2 \\r\n # --data_dir $array_records_dir\r\nslurm/jobs/mihir/horeka/yolo-runs/sampling.sh: 5: module: not found\r\nslurm/jobs/mihir/horeka/yolo-runs/sampling.sh: 6: module: not found\r\nmkdir: cannot create directory ‘/checkpoints’: Permission denied\r\n/storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544\r\nSLURM_STEP_NODELIST=node17\r\nSLURM_JOB_USER=mahajanm\r\nSLURM_JOB_GPUS=0\r\nSLURM_JOBID=1393544\r\nSLURM_PTY_PORT=39621\r\nSLURM_JOB_QOS=stud\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_SRUN_COMM_PORT=42571\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_TOPOLOGY_ADDR_PATTERN=node\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_JOB_START_TIME=1753197754\r\nSLURM_JOB_CPUS_PER_NODE=5\r\nSLURM_JOB_NAME=interactive\r\nSLURM_JOB_GID=20909\r\nSLURM_CPUS_ON_NODE=5\r\nSLURM_PROCID=0\r\nSLURM_JOB_ACCOUNT=stud\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_CONF=/var/spool/slurmd/conf-cache/slurm.conf\r\nSLURM_STEP_LAUNCHER_PORT=42571\r\nSLURM_SUBMIT_HOST=atcremers51\r\nSLURM_MPI_TYPE=none\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_NODELIST=node17\r\nSLURM_NNODES=1\r\nSLURM_JOB_ID=1393544\r\nSLURMD_NODENAME=node17\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NODELIST=node17\r\nSLURM_GTIDS=0\r\nSLURM_STEPID=4294967290\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_JOB_END_TIME=1753233754\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_PTY_WIN_ROW=27\r\nSLURM_JOB_UID=7389\r\nSLURM_CLUSTER_NAME=inf9\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_LOCALID=0\r\nSLURM_JOB_PARTITION=NORMAL\r\nSLURM_LAUNCH_NODE_IPADDR=131.159.18.70\r\nSLURMD_DEBUG=2\r\nSLURM_TASK_PID=3978593\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=node17\r\nSLURM_NPROCS=1\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_SRUN_COMM_HOST=131.159.18.70\r\nSLURM_SUBMIT_DIR=/usr/stud/mahajanm/Projects/jafar\r\nSLURM_PTY_WIN_COL=184\r\nSLURM_STEP_ID=4294967290\r\nSLURM_NODEID=0\r\n
null
terminal_output
671
2,217,777
TERMINAL
0
0
/usr/stud/mahajanm/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type `<class 'numpy.dtype'>`, but the default value `<class 'jax.numpy.float32'>` has type `<class 'jax._src.numpy.scalar_types._ScalarMeta'>`. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/usr/stud/mahajanm/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type `<class 'numpy.dtype'>`, but the default value `<class 'jax.numpy.bfloat16'>` has type `<class 'jax._src.numpy.scalar_types._ScalarMeta'>`. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n
null
terminal_output
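Editor's note: the two tyro `UserWarning`s above are benign but avoidable. The config fields are annotated as `numpy.dtype` while their defaults are JAX scalar types (`jnp.float32`, `jnp.bfloat16`), whose Python type is `_ScalarMeta`, so tyro has to guess. A minimal sketch of one way to make annotation and default agree, assuming a tyro-style dataclass config (field names are hypothetical, modeled on the warning text):

    # Minimal sketch: wrap the JAX scalar type in np.dtype(...) so the default
    # is a real numpy.dtype matching the annotation. numpy accepts any object
    # with a .dtype attribute, which jnp.float32 / jnp.bfloat16 provide.
    from dataclasses import dataclass
    import numpy as np
    import jax.numpy as jnp

    @dataclass
    class Args:
        # Before: param_dtype: np.dtype = jnp.float32  -> triggers the warning
        param_dtype: np.dtype = np.dtype(jnp.float32)
        dtype: np.dtype = np.dtype(jnp.bfloat16)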
672
2,220,029
TERMINAL
0
0
2025-07-22 18:24:05.595241: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n
null
terminal_output
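Editor's note: the `dot_search_space.cc` warning (repeated with new timestamps in the records below) comes from XLA's GEMM autotuner when none of its cached hint configs match; XLA falls back to the full search space, so results are correct and only compilation is slower. If the noise or compile time matters, autotuning can be dialed down via `XLA_FLAGS` before JAX is imported. A minimal sketch; `--xla_gpu_autotune_level` is a real XLA GPU flag, but treat the chosen level as an assumption for this setup:

    # Minimal sketch: lower XLA's GPU autotune level (4 is the default; 0
    # disables autotuning entirely). The env var must be set before jax is
    # imported so XLA picks the flag up.
    import os

    os.environ["XLA_FLAGS"] = (
        os.environ.get("XLA_FLAGS", "") + " --xla_gpu_autotune_level=2"
    )

    import jax  # noqa: E402  (imported after setting XLA_FLAGS on purpose)
    print(jax.devices())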
673
2,229,655
TERMINAL
0
0
2025-07-22 18:24:15.189720: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n
null
terminal_output
674
2,236,825
TERMINAL
0
0
2025-07-22 18:24:22.399459: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n
null
terminal_output
675
2,247,680
TERMINAL
0
0
2025-07-22 18:24:33.250334: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n
null
terminal_output
676
2,250,788
TERMINAL
0
0
WARNING:absl:Missing metrics for step 2000\r\nERROR:absl:File /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544/002000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 1000\r\nERROR:absl:File /storage/user/mahajanm/Projects/world-modeling/checkpoints/causal/overfit-oai-sample-actionspace-1/interactive/1393544/001000/metrics/metrics not found.\r\n
null
terminal_output
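Editor's note: the `Missing metrics for step 2000` / `metrics/metrics not found` messages above are Orbax's `CheckpointManager` looking for per-step metrics that the training job never saved; for pure sampling they are harmless. A minimal sketch of how metrics would be attached at save time, assuming the current orbax-checkpoint args API (the state and loss value are hypothetical placeholders):

    # Minimal sketch: pass `metrics=` when saving so later CheckpointManager
    # instances (e.g. ones configured with best_fn) can read
    # <step>/metrics/metrics under the checkpoint directory.
    import orbax.checkpoint as ocp

    options = ocp.CheckpointManagerOptions(max_to_keep=3, best_fn=lambda m: m["loss"])
    mngr = ocp.CheckpointManager("/tmp/ckpts", options=options)

    state = {"params": {"w": 0.0}}  # placeholder train state
    mngr.save(1000, args=ocp.args.StandardSave(state), metrics={"loss": 0.123})
    mngr.wait_until_finished()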
677
2,258,306
TERMINAL
0
0
2025-07-22 18:24:43.906207: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n
null
terminal_output
678
2,265,091
TERMINAL
0
0
2025-07-22 18:24:50.574897: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n
null
terminal_output
679
2,319,597
TERMINAL
0
0
autoreg sampling...\r\nSampling token 0 from frame 1\r\nSampling token 1 from frame 1\r\n[... one progress line per token, tokens 2-255 elided ...]\r\nSampling token 256 from frame 1\r\nSampling token 257 from frame 1\r\n
null
terminal_output
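Editor's note: the `Sampling token N from frame 1` loop in the record above is the causal dynamics model decoding one latent token at a time: frame 0 is given as context and all 258 tokens of frame 1 are sampled autoregressively. A minimal JAX sketch of that inner loop; the model stub, vocabulary size, and shapes are reconstructions inferred from the log, not taken from the actual sample.py:

    # Minimal sketch of per-token autoregressive decoding for one frame.
    # `logits_fn` stands in for the dynamics model; everything here is a
    # hypothetical reconstruction of what the log suggests.
    import jax
    import jax.numpy as jnp

    TOKENS_PER_FRAME = 258  # inferred from "Sampling token 0..257" in the log
    VOCAB = 1024            # hypothetical codebook size

    def logits_fn(tokens):
        """Placeholder model: returns logits for every position."""
        return jax.random.normal(jax.random.PRNGKey(0), (tokens.shape[0], VOCAB))

    def sample_frame(prev_tokens, key):
        tokens = jnp.zeros(TOKENS_PER_FRAME, dtype=jnp.int32)  # frame 1, filled in
        for i in range(TOKENS_PER_FRAME):
            key, sub = jax.random.split(key)
            # Condition on frame 0 plus the frame-1 tokens decoded so far.
            logits = logits_fn(jnp.concatenate([prev_tokens, tokens]))
            nxt = jax.random.categorical(sub, logits[TOKENS_PER_FRAME + i])
            tokens = tokens.at[i].set(nxt)
            print(f"Sampling token {i} from frame 1")
        return tokens

    frame0 = jnp.zeros(TOKENS_PER_FRAME, dtype=jnp.int32)  # placeholder context
    frame1 = sample_frame(frame0, jax.random.PRNGKey(42))

This per-token loop is why sampling is slow relative to training: the model is evaluated once per token (258 times per frame here) rather than once per frame.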